using System.Diagnostics.CodeAnalysis;
using System.Text;
using System.Xml.Linq;
using FluentAssertions;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Text;
using TestUtilities;
using Xunit;
namespace Meziantou.Framework.ResxSourceGenerator.Tests;
public class ResxGeneratorTest
{
private static async Task<(GeneratorDriverRunResult Result, byte[] Assembly)> GenerateFiles((string ResxPath, string ResxContent)[] files, OptionProvider optionProvider, bool mustCompile = true)
{
var netcoreRef = await NuGetHelpers.GetNuGetReferences("Microsoft.NETCore.App.Ref", "6.0.0", "ref/net6.0/");
var desktopRef = await NuGetHelpers.GetNuGetReferences("Microsoft.WindowsDesktop.App.Ref", "6.0.0", "ref/net6.0/");
var references = netcoreRef.Concat(desktopRef)
.Select(loc => MetadataReference.CreateFromFile(loc))
.ToArray();
var compilation = CSharpCompilation.Create("compilation",
new[] { CSharpSyntaxTree.ParseText("") },
references,
new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary));
var generator = new ResxGenerator();
var wrapperType = (ISourceGenerator)Activator.CreateInstance(Type.GetType("Microsoft.CodeAnalysis.IncrementalGeneratorWrapper, Microsoft.CodeAnalysis", throwOnError: true), generator);
GeneratorDriver driver = CSharpGeneratorDriver.Create(
generators: new ISourceGenerator[] { wrapperType },
additionalTexts: files.Select(file => (AdditionalText)new TestAdditionalText(file.ResxPath, file.ResxContent)).ToArray(),
optionsProvider: optionProvider);
driver = driver.RunGeneratorsAndUpdateCompilation(compilation, out var outputCompilation, out var diagnostics);
var runResult = driver.GetRunResult();
using var ms = new MemoryStream();
var result = outputCompilation.Emit(ms);
if (mustCompile)
{
var diags = string.Join("\n", result.Diagnostics);
var generated = (await runResult.GeneratedTrees[0].GetRootAsync()).ToFullString();
result.Success.Should().BeTrue("Project cannot build:\n" + diags + "\n\n\n" + generated);
result.Diagnostics.Should().BeEmpty();
}
return (runResult, ms.ToArray());
}
[Fact]
public async Task GenerateProperties()
{
var element = new XElement("root",
new XElement("data", new XAttribute("name", "Sample"), new XElement("value", "Value")),
new XElement("data", new XAttribute("name", "HelloWorld"), new XElement("value", "Hello {0}!")),
new XElement("data", new XAttribute("name", "Image1"), new XAttribute("type", "System.Resources.ResXFileRef, System.Windows.Forms"), new XElement("value", @"Resources\Image1.png;System.Drawing.Bitmap, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a"))
);
var (result, _) = await GenerateFiles(new[] { ("test.resx", element.ToString()) }, new OptionProvider
{
Namespace = "test",
ResourceName = "test",
});
result.Diagnostics.Should().BeEmpty();
result.GeneratedTrees.Should().ContainSingle();
Path.GetFileName(result.GeneratedTrees[0].FilePath).Should().Be("test.resx.g.cs");
var fileContent = (await result.GeneratedTrees[0].GetRootAsync()).ToFullString();
fileContent.Should().Contain("Sample");
fileContent.Should().NotContain("FormatSample");
fileContent.Should().Contain("HelloWorld\n");
fileContent.Should().Contain("FormatHelloWorld(object? arg0)");
fileContent.Should().Contain("public static global::System.Drawing.Bitmap? @Image1");
}
[Fact]
public async Task GeneratePropertiesFromMultipleResx()
{
var element1 = new XElement("root",
new XElement("data", new XAttribute("name", "Sample"), new XElement("value", "Value")),
new XElement("data", new XAttribute("name", "HelloWorld"), new XElement("value", "Hello {0}!"))
);
var element2 = new XElement("root",
new XElement("data", new XAttribute("name", "Sample"), new XElement("value", "Value")),
new XElement("data", new XAttribute("name", "HelloWorld2"), new XElement("value", "Hello {0}!"))
);
var element3 = new XElement("root",
new XElement("data", new XAttribute("name", "AAA"), new XElement("value", "Value"))
);
var element4 = new XElement("root",
new XElement("data", new XAttribute("name", "BBB"), new XElement("value", "Value"))
);
var (result, assembly) = await GenerateFiles(new (string, string)[]
{
(FullPath.GetTempPath() / "test.resx", element1.ToString()),
(FullPath.GetTempPath() / "test.en.resx", element2.ToString()),
(FullPath.GetTempPath() / "test.fr-FR.resx", element3.ToString()),
(FullPath.GetTempPath() / "test.NewResource.fr.resx", element4.ToString()),
}, new OptionProvider
{
ProjectDir = FullPath.GetTempPath(),
RootNamespace = "Test",
});
result.GeneratedTrees.OrderBy(t => t.FilePath).Should().SatisfyRespectively(tree =>
{
var fileContent = tree.GetRoot().ToFullString();
Path.GetFileName(tree.FilePath).Should().Be("test.NewResource.resx.g.cs");
fileContent.Should().Contain("BBB");
}, tree =>
{
var fileContent = tree.GetRoot().ToFullString();
Path.GetFileName(tree.FilePath).Should().Be("test.resx.g.cs");
fileContent.Should().Contain("Sample");
fileContent.Should().Contain("HelloWorld");
fileContent.Should().Contain("AAA");
});
}
[Fact]
public async Task ComputeNamespace_RootDir()
{
var (result, _) = await GenerateFiles(new (string, string)[] { (FullPath.GetTempPath() / "dir" / "proj" / "test.resx", new XElement("root").ToString()) }, new OptionProvider
{
ProjectDir = FullPath.GetTempPath() / "dir" / "proj",
RootNamespace = "proj",
});
result.Diagnostics.Should().BeEmpty();
var fileContent = (await result.GeneratedTrees[0].GetRootAsync()).ToFullString();
fileContent.Should().Contain("namespace proj" + Environment.NewLine);
}
[Fact]
public async Task ComputeNamespace_SubFolder()
{
var (result, _) = await GenerateFiles(new (string, string)[] { (FullPath.GetTempPath() / "dir" / "proj" / "A" / "test.resx", new XElement("root").ToString()) }, new OptionProvider
{
ProjectDir = FullPath.GetTempPath() / "dir" / "proj",
RootNamespace = "proj",
});
var fileContent = (await result.GeneratedTrees[0].GetRootAsync()).ToFullString();
fileContent.Should().Contain("namespace proj.A" + Environment.NewLine);
}
[Fact]
public async Task WrongResx_Warning()
{
var (result, _) = await GenerateFiles(new[] { ("test.resx", "invalid xml") }, new OptionProvider
{
ResourceName = "resource",
Namespace = "test",
}, mustCompile: false);
result.Diagnostics.Should().SatisfyRespectively(diag => diag.Id.Should().Be("MFRG0001"));
}
private sealed class OptionProvider : AnalyzerConfigOptionsProvider
{
public string ProjectDir { get; set; }
public string RootNamespace { get; set; }
public string Namespace { get; set; }
public string ClassName { get; set; }
public string ResourceName { get; set; }
public override AnalyzerConfigOptions GlobalOptions => new Options(this);
public override AnalyzerConfigOptions GetOptions(SyntaxTree tree) => new Options(this);
public override AnalyzerConfigOptions GetOptions(AdditionalText textFile) => new Options(this);
private sealed class Options : AnalyzerConfigOptions
{
private readonly OptionProvider _optionProvider;
public Options(OptionProvider optionProvider) => _optionProvider = optionProvider;
public override bool TryGetValue(string key, [NotNullWhen(true)] out string value)
{
const string BuildMetadata = "build_metadata.AdditionalFiles.";
const string BuildProperties = "build_property.";
if (key.StartsWith(BuildMetadata, StringComparison.Ordinal))
{
key = key[BuildMetadata.Length..];
}
else if (key.StartsWith(BuildProperties, StringComparison.Ordinal))
{
key = key[BuildProperties.Length..];
}
else
{
value = null;
return false;
}
switch (key)
{
case "RootNamespace":
if (_optionProvider.RootNamespace != null)
{
value = _optionProvider.RootNamespace;
return true;
}
break;
case "ProjectDir":
if (_optionProvider.ProjectDir != null)
{
value = _optionProvider.ProjectDir;
return true;
}
break;
case "Namespace":
if (_optionProvider.Namespace != null)
{
value = _optionProvider.Namespace;
return true;
}
break;
case "ResourceName":
if (_optionProvider.ResourceName != null)
{
value = _optionProvider.ResourceName;
return true;
}
break;
case "ClassName":
if (_optionProvider.ClassName != null)
{
value = _optionProvider.ClassName;
return true;
}
break;
}
value = null;
return false;
}
}
}
private sealed class TestAdditionalText : AdditionalText
{
private readonly SourceText _text;
public TestAdditionalText(string path, SourceText text)
{
Path = path;
_text = text;
}
public TestAdditionalText(string path, string text, Encoding encoding = null)
: this(path, SourceText.From(text, encoding))
{
}
public override string Path { get; }
public override SourceText GetText(CancellationToken cancellationToken = default) => _text;
}
}
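// For reference, the assertions above imply a generated accessor roughly shaped like the
// sketch below. This is illustrative only; the exact code emitted by ResxGenerator (member
// visibility, nullability, helper members) may differ.
//
//   namespace test
//   {
//       partial class test
//       {
//           public static string? Sample { get; }                        // "Value"
//           public static string? HelloWorld { get; }                    // "Hello {0}!"
//           public static string FormatHelloWorld(object? arg0) { ... }  // string.Format over HelloWorld
//           public static global::System.Drawing.Bitmap? Image1 { get; } // ResXFileRef entry
//       }
//   }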
| |
#region PDFsharp - A .NET library for processing PDF
//
// Authors:
// Stefan Lange
//
// Copyright (c) 2005-2016 empira Software GmbH, Cologne Area (Germany)
//
// http://www.pdfsharp.com
// http://sourceforge.net/projects/pdfsharp
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Diagnostics;
namespace PdfSharp.Pdf.Filters
{
/// <summary>
/// Applies standard filters to streams.
/// </summary>
public static class Filtering
{
/// <summary>
/// Gets the filter specified by the case-sensitive name.
/// </summary>
public static Filter GetFilter(string filterName)
{
if (filterName.StartsWith("/"))
filterName = filterName.Substring(1);
// Some tools use abbreviations
switch (filterName)
{
case "ASCIIHexDecode":
case "AHx":
return _asciiHexDecode ?? (_asciiHexDecode = new AsciiHexDecode());
case "ASCII85Decode":
case "A85":
return _ascii85Decode ?? (_ascii85Decode = new Ascii85Decode());
case "LZWDecode":
case "LZW":
return _lzwDecode ?? (_lzwDecode = new LzwDecode());
case "FlateDecode":
case "Fl":
return _flateDecode ?? (_flateDecode = new FlateDecode());
//case "RunLengthDecode":
// if (RunLengthDecode == null)
// RunLengthDecode = new RunLengthDecode();
// return RunLengthDecode;
//
//case "CCITTFaxDecode":
// if (CCITTFaxDecode == null)
// CCITTFaxDecode = new CCITTFaxDecode();
// return CCITTFaxDecode;
//
//case "JBIG2Decode":
// if (JBIG2Decode == null)
// JBIG2Decode = new JBIG2Decode();
// return JBIG2Decode;
//
//case "DCTDecode":
// if (DCTDecode == null)
// DCTDecode = new DCTDecode();
// return DCTDecode;
//
//case "JPXDecode":
// if (JPXDecode == null)
// JPXDecode = new JPXDecode();
// return JPXDecode;
//
//case "Crypt":
// if (Crypt == null)
// Crypt = new Crypt();
// return Crypt;
case "RunLengthDecode":
case "CCITTFaxDecode":
case "JBIG2Decode":
case "DCTDecode":
case "JPXDecode":
case "Crypt":
Debug.WriteLine("Filter not implemented: " + filterName);
return null;
}
throw new NotImplementedException("Unknown filter: " + filterName);
}
/// <summary>
/// Gets the filter singleton.
/// </summary>
// ReSharper disable InconsistentNaming
public static AsciiHexDecode ASCIIHexDecode
// ReSharper restore InconsistentNaming
{
get { return _asciiHexDecode ?? (_asciiHexDecode = new AsciiHexDecode()); }
}
static AsciiHexDecode _asciiHexDecode;
/// <summary>
/// Gets the filter singleton.
/// </summary>
public static Ascii85Decode ASCII85Decode
{
get { return _ascii85Decode ?? (_ascii85Decode = new Ascii85Decode()); }
}
static Ascii85Decode _ascii85Decode;
/// <summary>
/// Gets the filter singleton.
/// </summary>
public static LzwDecode LzwDecode
{
get { return _lzwDecode ?? (_lzwDecode = new LzwDecode()); }
}
static LzwDecode _lzwDecode;
/// <summary>
/// Gets the filter singleton.
/// </summary>
public static FlateDecode FlateDecode
{
get { return _flateDecode ?? (_flateDecode = new FlateDecode()); }
}
static FlateDecode _flateDecode;
//runLengthDecode
//ccittFaxDecode
//jbig2Decode
//dctDecode
//jpxDecode
//crypt
/// <summary>
/// Encodes the data with the specified filter.
/// </summary>
public static byte[] Encode(byte[] data, string filterName)
{
Filter filter = GetFilter(filterName);
if (filter != null)
return filter.Encode(data);
return null;
}
/// <summary>
/// Encodes a raw string with the specified filter.
/// </summary>
public static byte[] Encode(string rawString, string filterName)
{
Filter filter = GetFilter(filterName);
if (filter != null)
return filter.Encode(rawString);
return null;
}
/// <summary>
/// Decodes the data with the specified filter.
/// </summary>
public static byte[] Decode(byte[] data, string filterName, FilterParms parms)
{
Filter filter = GetFilter(filterName);
if (filter != null)
return filter.Decode(data, parms);
return null;
}
/// <summary>
/// Decodes the data with the specified filter.
/// </summary>
public static byte[] Decode(byte[] data, string filterName)
{
Filter filter = GetFilter(filterName);
if (filter != null)
return filter.Decode(data, null);
return null;
}
/// <summary>
/// Decodes the data with the specified filter.
/// </summary>
public static byte[] Decode(byte[] data, PdfItem filterItem)
{
byte[] result = null;
if (filterItem is PdfName)
{
Filter filter = GetFilter(filterItem.ToString());
if (filter != null)
result = filter.Decode(data);
}
else if (filterItem is PdfArray)
{
PdfArray array = (PdfArray)filterItem;
foreach (PdfItem item in array)
data = Decode(data, item);
result = data;
}
return result;
}
/// <summary>
/// Decodes to a raw string with the specified filter.
/// </summary>
public static string DecodeToString(byte[] data, string filterName, FilterParms parms)
{
Filter filter = GetFilter(filterName);
if (filter != null)
return filter.DecodeToString(data, parms);
return null;
}
/// <summary>
/// Decodes to a raw string with the specified filter.
/// </summary>
public static string DecodeToString(byte[] data, string filterName)
{
Filter filter = GetFilter(filterName);
if (filter != null)
return filter.DecodeToString(data, null);
return null;
}
}
}
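// A minimal usage sketch for the Filtering helpers above. The Samples namespace and class
// below are illustrative and not part of PDFsharp; only Encode/Decode and the filter names
// handled by GetFilter are assumed.
namespace PdfSharp.Pdf.Filters.Samples
{
    public static class FilteringUsage
    {
        /// <summary>
        /// Round-trips a buffer through the Flate filter: compress, then decompress.
        /// </summary>
        public static byte[] FlateRoundTrip(byte[] raw)
        {
            byte[] encoded = Filtering.Encode(raw, "FlateDecode");
            return Filtering.Decode(encoded, "FlateDecode");
        }
    }
}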
| |
using System;
using System.Collections;
using System.IO;
using Raksha.Asn1;
using Raksha.Asn1.Ocsp;
using Raksha.Asn1.X509;
using Raksha.Crypto;
using Raksha.Security;
using Raksha.Security.Certificates;
using Raksha.Utilities;
using Raksha.X509;
using Raksha.X509.Store;
namespace Raksha.Ocsp
{
/// <remarks>
/// <code>
/// BasicOcspResponse ::= SEQUENCE {
/// tbsResponseData ResponseData,
/// signatureAlgorithm AlgorithmIdentifier,
/// signature BIT STRING,
/// certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL
/// }
/// </code>
/// </remarks>
public class BasicOcspResp
: X509ExtensionBase
{
private readonly BasicOcspResponse resp;
private readonly ResponseData data;
// private readonly X509Certificate[] chain;
public BasicOcspResp(
BasicOcspResponse resp)
{
this.resp = resp;
this.data = resp.TbsResponseData;
}
/// <returns>The DER encoding of the tbsResponseData field.</returns>
/// <exception cref="OcspException">In the event of an encoding error.</exception>
public byte[] GetTbsResponseData()
{
try
{
return data.GetDerEncoded();
}
catch (IOException e)
{
throw new OcspException("problem encoding tbsResponseData", e);
}
}
public int Version
{
get { return data.Version.Value.IntValue + 1; }
}
public RespID ResponderId
{
get { return new RespID(data.ResponderID); }
}
public DateTime ProducedAt
{
get { return data.ProducedAt.ToDateTime(); }
}
public SingleResp[] Responses
{
get
{
Asn1Sequence s = data.Responses;
SingleResp[] rs = new SingleResp[s.Count];
for (int i = 0; i != rs.Length; i++)
{
rs[i] = new SingleResp(SingleResponse.GetInstance(s[i]));
}
return rs;
}
}
public X509Extensions ResponseExtensions
{
get { return data.ResponseExtensions; }
}
protected override X509Extensions GetX509Extensions()
{
return ResponseExtensions;
}
public string SignatureAlgName
{
get { return OcspUtilities.GetAlgorithmName(resp.SignatureAlgorithm.ObjectID); }
}
public string SignatureAlgOid
{
get { return resp.SignatureAlgorithm.ObjectID.Id; }
}
[Obsolete("RespData class is no longer required as all functionality is available on this class")]
public RespData GetResponseData()
{
return new RespData(data);
}
public byte[] GetSignature()
{
return resp.Signature.GetBytes();
}
private IList GetCertList()
{
// load the certificates and revocation lists if we have any
IList certs = Platform.CreateArrayList();
Asn1Sequence s = resp.Certs;
if (s != null)
{
foreach (Asn1Encodable ae in s)
{
try
{
certs.Add(new X509CertificateParser().ReadCertificate(ae.GetEncoded()));
}
catch (IOException ex)
{
throw new OcspException("can't re-encode certificate!", ex);
}
catch (CertificateException ex)
{
throw new OcspException("can't re-encode certificate!", ex);
}
}
}
return certs;
}
public X509Certificate[] GetCerts()
{
IList certs = GetCertList();
X509Certificate[] result = new X509Certificate[certs.Count];
for (int i = 0; i < certs.Count; ++i)
{
result[i] = (X509Certificate)certs[i];
}
return result;
}
/// <returns>The certificates, if any, associated with the response.</returns>
/// <exception cref="OcspException">In the event of an encoding error.</exception>
public IX509Store GetCertificates(
string type)
{
try
{
return X509StoreFactory.Create(
"Certificate/" + type,
new X509CollectionStoreParameters(this.GetCertList()));
}
catch (Exception e)
{
throw new OcspException("can't setup the CertStore", e);
}
}
/// <summary>
/// Verify the signature against the tbsResponseData object we contain.
/// </summary>
public bool Verify(
AsymmetricKeyParameter publicKey)
{
try
{
ISigner signature = SignerUtilities.GetSigner(this.SignatureAlgName);
signature.Init(false, publicKey);
byte[] bs = data.GetDerEncoded();
signature.BlockUpdate(bs, 0, bs.Length);
return signature.VerifySignature(this.GetSignature());
}
catch (Exception e)
{
throw new OcspException("exception processing sig: " + e, e);
}
}
/// <returns>The ASN.1 encoded representation of this object.</returns>
public byte[] GetEncoded()
{
return resp.GetEncoded();
}
public override bool Equals(
object obj)
{
if (obj == this)
return true;
BasicOcspResp other = obj as BasicOcspResp;
if (other == null)
return false;
return resp.Equals(other.resp);
}
public override int GetHashCode()
{
return resp.GetHashCode();
}
}
}
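// A minimal verification sketch for the class above. The Samples namespace, the helper name,
// and the assumption that X509Certificate exposes GetPublicKey() (as in the BouncyCastle API
// this code derives from) are illustrative.
namespace Raksha.Ocsp.Samples
{
    using Raksha.X509;

    public static class BasicOcspRespUsage
    {
        /// <summary>
        /// Checks the response signature against the first certificate embedded in the
        /// response, if any.
        /// </summary>
        public static bool VerifyAgainstEmbeddedCert(BasicOcspResp response)
        {
            X509Certificate[] certs = response.GetCerts();
            if (certs.Length == 0)
                return false;
            return response.Verify(certs[0].GetPublicKey());
        }
    }
}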
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Globalization;
using System.Collections;
/// <summary>
/// Copy(System.Array,System.Array,System.Int32)
/// </summary>
public class ArrayCopy1
{
const int c_MaxValue = 10;
const int c_MinValue = 0;
public static int Main()
{
ArrayCopy1 ArrayCopy1 = new ArrayCopy1();
TestLibrary.TestFramework.BeginTestCase("ArrayCopy1");
if (ArrayCopy1.RunTests())
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("PASS");
return 100;
}
else
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("FAIL");
return 0;
}
}
public bool RunTests()
{
bool retVal = true;
TestLibrary.TestFramework.LogInformation("[Positive]");
retVal = PosTest1() && retVal;
retVal = PosTest2() && retVal;
retVal = PosTest3() && retVal;
retVal = PosTest4() && retVal;
TestLibrary.TestFramework.LogInformation("[Negative]");
retVal = NegTest1() && retVal;
retVal = NegTest2() && retVal;
retVal = NegTest3() && retVal;
retVal = NegTest4() && retVal;
retVal = NegTest5() && retVal;
retVal = NegTest6() && retVal;
retVal = NegTest7() && retVal;
retVal = NegTest8() && retVal;
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool PosTest1()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest1:Copies a range of elements from an Array starting at the first element and pastes them into another Array starting at the first element,the two array have the same value type.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(Int32), c_MaxValue*2);
for(int i=0;i<c_MaxValue;i++)
{
myOriginalArray.SetValue(i, i);
myTargetArray.SetValue(i + c_MaxValue, i);
}
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
int index = 0;
for (IEnumerator itr = myOriginalArray.GetEnumerator(); itr.MoveNext(); )
{
object current = itr.Current ;
if (!current.Equals(myTargetArray.GetValue(index)))
{
TestLibrary.TestFramework.LogError("001", "Copy error");
retVal = false;
break;
}
index++;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("002", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool PosTest2()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest2:Copies a range of elements from an Array starting at the first element and pastes them into another Array starting at the first element,the two array have the same reference type.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(string), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(string), c_MaxValue * 2);
string generator1 = string.Empty;
string generator2 = string.Empty;
for (int i = 0; i < c_MaxValue; i++)
{
generator1 = TestLibrary.Generator.GetString(-55, true, c_MinValue, c_MaxValue);
generator2 = TestLibrary.Generator.GetString(-55, true, c_MinValue, c_MaxValue);
myOriginalArray.SetValue(generator1, i);
myTargetArray.SetValue(generator2, i);
}
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
int index = 0;
for (IEnumerator itr = myOriginalArray.GetEnumerator(); itr.MoveNext(); )
{
object current = itr.Current;
if (!current.Equals(myTargetArray.GetValue(index)))
{
TestLibrary.TestFramework.LogError("003", "Copy error");
retVal = false;
break;
}
index++;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("004", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool PosTest3()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest3:Copies a range of elements from an Array starting at the first element and pastes them into another Array starting at the first element,the two array can upcast.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(TestDeriveClass), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(TestBaseClass), c_MaxValue * 2);
TestDeriveClass generator1;
TestBaseClass generator2 ;
for (int i = 0; i < c_MaxValue; i++)
{
generator1 = new TestDeriveClass(i);
generator2 = new TestDeriveClass(i + c_MaxValue);
myOriginalArray.SetValue(generator1, i);
myTargetArray.SetValue(generator2, i);
}
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
int index = 0;
for (IEnumerator itr = myOriginalArray.GetEnumerator(); itr.MoveNext(); )
{
object current = itr.Current;
if (!current.Equals(myTargetArray.GetValue(index)))
{
TestLibrary.TestFramework.LogError("005", "Copy error");
retVal = false;
break;
}
index++;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("006", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool PosTest4()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest4:Copies a range of elements from an Array starting at the first element and pastes them into another Array starting at the first element,the two array can boxing.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(Object), c_MaxValue * 2);
int generator1;
object generator2;
for (int i = 0; i < c_MaxValue; i++)
{
generator1 =i;
generator2 = i+c_MaxValue;
myOriginalArray.SetValue(generator1, i);
myTargetArray.SetValue(generator2, i);
}
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
int index = 0;
for (IEnumerator itr = myOriginalArray.GetEnumerator(); itr.MoveNext(); )
{
object current = itr.Current;
if (!current.Equals(myTargetArray.GetValue(index)))
{
TestLibrary.TestFramework.LogError("007", "Copy error");
retVal = false;
break;
}
index++;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("008", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest1()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest1:sourceArray is a null reference.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
for (int i = 0; i < c_MaxValue; i++)
{
myOriginalArray.SetValue(i, i);
myTargetArray.SetValue(i + c_MaxValue, i);
}
myOriginalArray = null;
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
TestLibrary.TestFramework.LogError("009", "Copy error");
retVal = false;
}
catch (ArgumentNullException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("010", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest2()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest2:destinationArray is a null reference .");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
for (int i = 0; i < c_MaxValue; i++)
{
myOriginalArray.SetValue(i, i);
myTargetArray.SetValue(i + c_MaxValue, i);
}
myTargetArray = null;
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
TestLibrary.TestFramework.LogError("011", "Copy error");
retVal = false;
}
catch (ArgumentNullException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("012", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest3()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest3:sourceArray and destinationArray have different ranks.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
int[] parameter ={ c_MaxValue, c_MaxValue };
Array myTargetArray = Array.CreateInstance(typeof(Int32), parameter);
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
TestLibrary.TestFramework.LogError("013", "Copy error");
retVal = false;
}
catch (RankException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("014", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest4()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest4:sourceArray and destinationArray are of incompatible types.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(string), c_MaxValue * 2);
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
TestLibrary.TestFramework.LogError("015", "Copy error");
retVal = false;
}
catch (ArrayTypeMismatchException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("016", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest5()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest5:At least one element in sourceArray cannot be cast to the type of destinationArray.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(ITestInterface), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(TestBaseClass), c_MaxValue * 2);
TestDeriveClass generator1;
TestBaseClass generator2;
TestDeriveClass1 generator3=new TestDeriveClass1(100);
for (int i = 0; i < c_MaxValue; i++)
{
generator1 = new TestDeriveClass(i);
generator2 = new TestDeriveClass(i + c_MaxValue);
myOriginalArray.SetValue(generator1, i);
myTargetArray.SetValue(generator2, i);
}
myOriginalArray.SetValue(generator3, c_MaxValue - 1);
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue);
TestLibrary.TestFramework.LogError("017", "Copy error");
retVal = false;
}
catch (InvalidCastException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("018", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest6()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest6:length is less than zero.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
for (int i = 0; i < c_MaxValue; i++)
{
myOriginalArray.SetValue(i, i);
myTargetArray.SetValue(i + c_MaxValue, i);
}
Array.Copy(myOriginalArray, myTargetArray, c_MinValue-1);
TestLibrary.TestFramework.LogError("019", "Copy error");
retVal = false;
}
catch (ArgumentOutOfRangeException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("020", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest7()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest7:length is greater than the number of elements in sourceArray.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
for (int i = 0; i < c_MaxValue; i++)
{
myOriginalArray.SetValue(i, i);
myTargetArray.SetValue(i + c_MaxValue, i);
}
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue+1);
TestLibrary.TestFramework.LogError("021", "Copy error");
retVal = false;
}
catch (ArgumentException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("022", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest8()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest8:length is greater than the number of elements in destinationArray.");
try
{
Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
Array myTargetArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
for (int i = 0; i < c_MaxValue; i++)
{
myOriginalArray.SetValue(i, i);
myTargetArray.SetValue(i + c_MaxValue, i);
}
Array.Copy(myOriginalArray, myTargetArray, c_MaxValue * 2+1);
TestLibrary.TestFramework.LogError("023", "Copy error");
retVal = false;
}
catch (ArgumentException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("024", "Unexpected exception: " + e);
retVal = false;
}
return retVal;
}
}
// Create ITestInterface for NegTest5.
interface ITestInterface
{
string GetName();
}
// Create TestBaseClass for providing the test method and test target.
public abstract class TestBaseClass
{
// The value holder
protected int id;
public TestBaseClass(int Id)
{
id = Id;
}
protected int m_value;
protected abstract int GetValue();
}
// Create TestDeriveClass for providing the test method and test source.
public class TestDeriveClass : TestBaseClass, ITestInterface
{
int deriveId;
public TestDeriveClass(int Id)
: base(Id)
{
deriveId = Id;
}
protected override int GetValue()
{
return deriveId;
}
#region ITestInterface Members
public string GetName()
{
return "TestDeriveClass";
}
#endregion
}
// Create TestDeriveClass1 for providing the test method and test source.
public class TestDeriveClass1 : ITestInterface
{
int deriveId;
public TestDeriveClass1(int Id)
{
deriveId = Id;
}
protected int GetValue()
{
return deriveId;
}
#region ITestInterface Members
public string GetName()
{
return "TestDeriveClass1";
}
#endregion
}
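// A compact illustration of the Array.Copy(Array, Array, Int32) contract exercised by the
// tests above (standard BCL behavior; the helper class name is illustrative).
public static class ArrayCopyIllustration
{
    public static void Demo()
    {
        int[] source = { 1, 2, 3 };
        object[] destination = new object[5];

        // Copies the first three elements, boxing each Int32 into the Object array (PosTest4).
        Array.Copy(source, destination, 3);

        // A negative length throws ArgumentOutOfRangeException (NegTest6); a length larger
        // than either array throws ArgumentException (NegTest7/NegTest8); incompatible
        // element types throw ArrayTypeMismatchException (NegTest4).
    }
}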
| |
// <copyright file="ReaderWriterSynchronizedBase.cs" company="Adrian Mos">
// Copyright (c) Adrian Mos with all rights reserved. Part of the IX Framework.
// </copyright>
using System;
using System.Runtime.Serialization;
using System.Threading;
using IX.StandardExtensions.ComponentModel;
using IX.StandardExtensions.Contracts;
using IX.System.Threading;
using JetBrains.Annotations;
using ReaderWriterLockSlim = IX.System.Threading.ReaderWriterLockSlim;
namespace IX.StandardExtensions.Threading;
/// <summary>
/// A base class for a reader/writer synchronized class.
/// </summary>
/// <seealso cref="IX.StandardExtensions.ComponentModel.DisposableBase" />
[DataContract(Namespace = Constants.DataContractNamespace)]
[PublicAPI]
public abstract partial class ReaderWriterSynchronizedBase : DisposableBase
{
#region Internal state
private readonly bool lockInherited;
private IReaderWriterLock locker;
[DataMember]
private TimeSpan lockerTimeout;
#endregion
#region Constructors and destructors
/// <summary>
/// Initializes a new instance of the <see cref="ReaderWriterSynchronizedBase" /> class.
/// </summary>
protected ReaderWriterSynchronizedBase()
{
this.locker = new ReaderWriterLockSlim();
this.lockerTimeout = EnvironmentSettings.LockAcquisitionTimeout;
}
/// <summary>
/// Initializes a new instance of the <see cref="ReaderWriterSynchronizedBase" /> class.
/// </summary>
/// <param name="locker">The locker.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="locker" />
/// is <see langword="null" /> (<see langword="Nothing" /> in Visual Basic).
/// </exception>
protected ReaderWriterSynchronizedBase(IReaderWriterLock? locker)
{
Requires.NotNull(
out this.locker,
locker);
this.lockInherited = true;
this.lockerTimeout = EnvironmentSettings.LockAcquisitionTimeout;
}
/// <summary>
/// Initializes a new instance of the <see cref="ReaderWriterSynchronizedBase" /> class.
/// </summary>
/// <param name="timeout">The lock timeout duration.</param>
protected ReaderWriterSynchronizedBase(TimeSpan timeout)
{
this.locker = new ReaderWriterLockSlim();
this.lockerTimeout = timeout;
}
/// <summary>
/// Initializes a new instance of the <see cref="ReaderWriterSynchronizedBase" /> class.
/// </summary>
/// <param name="locker">The locker.</param>
/// <param name="timeout">The lock timeout duration.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="locker" />
/// is <see langword="null" /> (<see langword="Nothing" /> in Visual Basic).
/// </exception>
protected ReaderWriterSynchronizedBase(
IReaderWriterLock locker,
TimeSpan timeout)
{
Requires.NotNull(
out this.locker,
locker);
this.lockInherited = true;
this.lockerTimeout = timeout;
}
#endregion
#region Methods
/// <summary>
/// Called when the object is being deserialized, in order to set the locker to a new value.
/// </summary>
/// <param name="context">The streaming context.</param>
[OnDeserializing]
internal void OnDeserializingMethod(StreamingContext context) =>
Interlocked.Exchange(
ref this.locker,
new ReaderWriterLockSlim());
#region Disposable
/// <summary>
/// Disposes in the managed context.
/// </summary>
protected override void DisposeManagedContext()
{
if (!this.lockInherited)
{
this.locker.Dispose();
}
base.DisposeManagedContext();
}
#endregion
/// <summary>
/// Produces a reader lock in concurrent collections.
/// </summary>
/// <returns>A disposable object representing the lock.</returns>
protected ReadOnlySynchronizationLocker ReadLock()
{
this.RequiresNotDisposed();
return new ReadOnlySynchronizationLocker(
this.locker,
this.lockerTimeout);
}
/// <summary>
/// Invokes using a reader lock.
/// </summary>
/// <param name="action">An action that is called.</param>
protected void ReadLock(Action action)
{
this.RequiresNotDisposed();
Action localAction = Requires.NotNull(action);
using (new ReadOnlySynchronizationLocker(
this.locker,
this.lockerTimeout))
{
localAction();
}
}
/// <summary>
/// Gets a result from an invoker using a reader lock.
/// </summary>
/// <param name="action">An action that is called to get the result.</param>
/// <typeparam name="T">The type of the object to return.</typeparam>
/// <returns>A disposable object representing the lock.</returns>
protected T ReadLock<T>(Func<T> action)
{
this.RequiresNotDisposed();
Func<T> localAction = Requires.NotNull(action);
using (new ReadOnlySynchronizationLocker(
this.locker,
this.lockerTimeout))
{
return localAction();
}
}
/// <summary>
/// Produces a writer lock in concurrent collections.
/// </summary>
/// <returns>A disposable object representing the lock.</returns>
protected WriteOnlySynchronizationLocker WriteLock()
{
this.RequiresNotDisposed();
return new WriteOnlySynchronizationLocker(
this.locker,
this.lockerTimeout);
}
/// <summary>
/// Invokes using a writer lock.
/// </summary>
/// <param name="action">An action that is called.</param>
protected void WriteLock(Action action)
{
this.RequiresNotDisposed();
Action localAction = Requires.NotNull(action);
using (new WriteOnlySynchronizationLocker(
this.locker,
this.lockerTimeout))
{
localAction();
}
}
/// <summary>
/// Invokes using a writer lock.
/// </summary>
/// <typeparam name="T">The type of item to return.</typeparam>
/// <param name="action">An action that is called.</param>
/// <returns>The generated item.</returns>
protected T WriteLock<T>(Func<T> action)
{
this.RequiresNotDisposed();
Func<T> localAction = Requires.NotNull(action);
using (new WriteOnlySynchronizationLocker(
this.locker,
this.lockerTimeout))
{
return localAction();
}
}
/// <summary>
/// Produces an upgradeable reader lock in concurrent collections.
/// </summary>
/// <returns>A disposable object representing the lock.</returns>
protected ReadWriteSynchronizationLocker ReadWriteLock()
{
this.RequiresNotDisposed();
return new ReadWriteSynchronizationLocker(
this.locker,
this.lockerTimeout);
}
#endregion
}
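/// <summary>
/// A minimal consumer sketch, assuming only the protected ReadLock/WriteLock helpers shown
/// above. The counter type itself is illustrative and not part of the framework.
/// </summary>
internal sealed class SynchronizedCounterSample : ReaderWriterSynchronizedBase
{
    private int count;

    /// <summary>Gets the current count under a reader lock.</summary>
    public int Count => this.ReadLock(() => this.count);

    /// <summary>Increments the count under a writer lock.</summary>
    public void Increment() => this.WriteLock(() => { this.count++; });
}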
| |
using CrystalDecisions.CrystalReports.Engine;
using CrystalDecisions.Windows.Forms;
using DpSdkEngLib;
using DPSDKOPSLib;
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Diagnostics;
using System.Windows.Forms;
using System.Linq;
using System.Xml.Linq;
// ERROR: Not supported in C#: OptionDeclaration
namespace _4PosBackOffice.NET
{
internal partial class frmKeyboardList : System.Windows.Forms.Form
{
string gFilter;
ADODB.Recordset gRS;
string gFilterSQL;
int gID;
short gSection;
bool gAll;
private void loadLanguage()
{
//frmKeyboardList = No Code [Select a Keyboard]
//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
//If rsLang.RecordCount Then frmKeyboard.Caption = rsLang("LanguageLayoutLnk_Description"): frmKeyboard.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1080;
//Search|Checked
if (modRecordSet.rsLang.RecordCount != 0) {
    lbl.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;
    lbl.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;
}
modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1065;
//New|Checked
if (modRecordSet.rsLang.RecordCount != 0) {
    cmdNew.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;
    cmdNew.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;
}
modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1004;
//Exit|Checked
if (modRecordSet.rsLang.RecordCount != 0) {
    cmdExit.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;
    cmdExit.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;
}
modRecordSet.rsHelp.filter = "Help_Section=0 AND Help_Form='" + this.Name + "'";
//UPGRADE_ISSUE: Form property frmKeyboardList.ToolTip1 was not upgraded. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="CC4C7EC0-C903-48FC-ACCC-81861D12DA4A"'
if (modRecordSet.rsHelp.RecordCount != 0)
this.ToolTip1 = modRecordSet.rsHelp.Fields("Help_ContextID").Value;
}
public void loadItem(ref short section)
{
gSection = section;
if (gSection != 0)
cmdNew.Visible = false;
doSearch();
loadLanguage();
this.ShowDialog();
}
public int getItem()
{
cmdNew.Visible = false;
loadLanguage();
this.ShowDialog();
return gID;
}
private void getNamespace()
{
}
private void cmdExit_Click(System.Object eventSender, System.EventArgs eventArgs)
{
this.Close();
}
private void cmdNamespace_Click()
{
My.MyProject.Forms.frmFilter.loadFilter(ref gFilter);
getNamespace();
}
private void cmdNew_Click(System.Object eventSender, System.EventArgs eventArgs)
{
ADODB.Recordset rs = default(ADODB.Recordset);
modRecordSet.cnnDB.Execute("INSERT INTO KeyboardLayout ( KeyboardLayout_Name ) SELECT 'New Keyboard';");
string sql = "SELECT Max(KeyboardLayout.KeyboardLayoutID) AS MaxOfKeyboardLayoutID FROM KeyboardLayout;";
rs = modRecordSet.getRS(ref sql);
// Open the newly created keyboard layout by its id (assumes frmKeyboard.loadItem takes the id by ref).
int newId = Convert.ToInt32(rs.Fields("MaxOfKeyboardLayoutID").Value);
My.MyProject.Forms.frmKeyboard.loadItem(ref newId);
doSearch();
}
private void DataList1_DblClick(System.Object eventSender, System.EventArgs eventArgs)
{
if (cmdNew.Visible) {
if (!string.IsNullOrEmpty(DataList1.BoundText)) {
int itemId = Convert.ToInt32(DataList1.BoundText);
My.MyProject.Forms.frmKeyboard.loadItem(ref itemId);
}
doSearch();
} else {
if (string.IsNullOrEmpty(DataList1.BoundText)) {
gID = 0;
} else {
gID = Convert.ToInt32(DataList1.BoundText);
}
this.Close();
}
}
private void DataList1_KeyPress(System.Object eventSender, KeyPressEventArgs eventArgs)
{
switch (eventArgs.KeyChar) {
case (char)13: // Enter
DataList1_DblClick(DataList1, new System.EventArgs());
eventArgs.KeyChar = Strings.ChrW(0);
break;
case (char)27: // Escape
this.Close();
eventArgs.KeyChar = Strings.ChrW(0);
break;
}
}
private void frmKeyboardList_KeyDown(System.Object eventSender, System.Windows.Forms.KeyEventArgs eventArgs)
{
short KeyCode = (short)eventArgs.KeyCode;
short Shift = (short)((int)eventArgs.KeyData / 0x10000);
if (KeyCode == 36) {
gAll = !gAll;
doSearch();
KeyCode = 0;
}
}
private void frmKeyboardList_KeyPress(System.Object eventSender, System.Windows.Forms.KeyPressEventArgs eventArgs)
{
short KeyAscii = (short)Strings.Asc(eventArgs.KeyChar);
switch (KeyAscii) {
case (short)System.Windows.Forms.Keys.Escape:
KeyAscii = 0;
cmdExit_Click(cmdExit, new System.EventArgs());
break;
}
eventArgs.KeyChar = Strings.Chr(KeyAscii);
if (KeyAscii == 0) {
eventArgs.Handled = true;
}
}
private void frmKeyboardList_Load(System.Object eventSender, System.EventArgs eventArgs)
{
doSearch();
}
private void frmKeyboardList_FormClosed(System.Object eventSender, System.Windows.Forms.FormClosedEventArgs eventArgs)
{
gRS.Close();
}
private void txtSearch_Enter(System.Object eventSender, System.EventArgs eventArgs)
{
txtSearch.SelectionStart = 0;
txtSearch.SelectionLength = 999;
}
private void txtSearch_KeyDown(System.Object eventSender, System.Windows.Forms.KeyEventArgs eventArgs)
{
short KeyCode = (short)eventArgs.KeyCode;
short Shift = (short)((int)eventArgs.KeyData / 0x10000);
switch (KeyCode) {
case 40:
this.DataList1.Focus();
break;
}
}
private void txtSearch_KeyPress(System.Object eventSender, System.Windows.Forms.KeyPressEventArgs eventArgs)
{
short KeyAscii = (short)Strings.Asc(eventArgs.KeyChar);
switch (KeyAscii) {
case 13:
doSearch();
KeyAscii = 0;
break;
}
eventArgs.KeyChar = Strings.Chr(KeyAscii);
if (KeyAscii == 0) {
eventArgs.Handled = true;
}
}
private void doSearch()
{
string sql = null;
string lString = null;
lString = Strings.Trim(txtSearch.Text);
// Collapse runs of spaces down to a single space before building the filter.
lString = Strings.Replace(lString, "  ", " ");
lString = Strings.Replace(lString, "  ", " ");
lString = Strings.Replace(lString, "  ", " ");
lString = Strings.Replace(lString, "  ", " ");
lString = Strings.Replace(lString, "  ", " ");
lString = Strings.Replace(lString, "  ", " ");
lString = " WHERE (KeyboardLayout_Name LIKE '%" + Strings.Replace(lString, " ", "%' AND KeyboardLayout_Name LIKE '%") + "%')";
sql = "SELECT KeyboardLayout.KeyboardLayoutID, KeyboardLayout.KeyboardLayout_Name FROM KeyboardLayout " + lString + " ORDER BY KeyboardLayout_Name";
gRS = modRecordSet.getRS(ref sql);
//Display the list of Titles in the DataCombo
DataList1.DataSource = gRS;
DataList1.listField = "KeyboardLayout_Name";
//Bind the DataCombo to the ADO Recordset
//UPGRADE_ISSUE: VBControlExtender property DataList1.DataSource is not supported at runtime. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="74E732F3-CAD8-417B-8BC9-C205714BB4A7"'
DataList1.DataSource = gRS;
DataList1.boundColumn = "KeyboardLayoutID";
}
}
}
| |
// Copyright (c) Microsoft Open Technologies, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Instrumentation;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
using InternalSyntax = Microsoft.CodeAnalysis.CSharp.Syntax.InternalSyntax;
using System.Collections.Immutable;
namespace Microsoft.CodeAnalysis.CSharp
{
/// <summary>
/// The parsed representation of a C# source document.
/// </summary>
public abstract partial class CSharpSyntaxTree : SyntaxTree
{
internal static readonly SyntaxTree Dummy = new DummySyntaxTree();
/// <summary>
/// The options used by the parser to produce the syntax tree.
/// </summary>
public new abstract CSharpParseOptions Options { get; }
// REVIEW: I would prefer to not expose CloneAsRoot and make the functionality
// internal to CaaS layer, to ensure that for a given SyntaxTree there can not
// be multiple trees claiming to be its children.
//
// However, as long as we provide GetRoot extensibility point on SyntaxTree
// the guarantee above cannot be implemented and we have to provide some way for
// creating root nodes.
//
// Therefore I place CloneAsRoot API on SyntaxTree and make it protected to
// at least limit its visibility to SyntaxTree extenders.
/// <summary>
/// Produces a clone of a CSharpSyntaxNode which will have current syntax tree as its parent.
///
/// Caller must guarantee that if the same instance of CSharpSyntaxNode makes multiple calls
/// to this function, only one result is observable.
/// </summary>
/// <typeparam name="T">Type of the syntax node.</typeparam>
/// <param name="node">The original syntax node.</param>
/// <returns>A clone of the original syntax node that has current SyntaxTree as its parent.</returns>
protected T CloneNodeAsRoot<T>(T node) where T : CSharpSyntaxNode
{
return CSharpSyntaxNode.CloneNodeAsRoot(node, this);
}
public override Task<SourceText> GetTextAsync(CancellationToken cancellationToken = default(CancellationToken))
{
SourceText text;
if (this.TryGetText(out text))
{
return Task.FromResult(text);
}
else
{
return Task.Factory.StartNew(() => this.GetText(cancellationToken), cancellationToken); // TODO: Should we use ExceptionFilter.ExecuteWithErrorReporting here?
}
}
/// <summary>
/// Gets the root node of the syntax tree.
/// </summary>
public new abstract CSharpSyntaxNode GetRoot(CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the root node of the syntax tree if it is available.
/// </summary>
public abstract bool TryGetRoot(out CSharpSyntaxNode root);
/// <summary>
/// Gets the root node of the syntax tree asynchronously.
/// </summary>
public new virtual Task<CSharpSyntaxNode> GetRootAsync(CancellationToken cancellationToken = default(CancellationToken))
{
CSharpSyntaxNode node;
if (this.TryGetRoot(out node))
{
return Task.FromResult(node);
}
else
{
return Task.Factory.StartNew(() => this.GetRoot(cancellationToken), cancellationToken); // TODO: Should we use ExceptionFilter.ExecuteWithErrorReporting here?
}
}
/// <summary>
/// Returns the root of the syntax tree strongly typed to <see cref="CompilationUnitSyntax"/>.
/// </summary>
/// <remarks>
/// Ensure that <see cref="P:HasCompilationUnitRoot"/> is true for this tree prior to invoking this method.
/// </remarks>
/// <exception cref="InvalidCastException">Throws this exception if <see cref="P:HasCompilationUnitRoot"/> is false.</exception>
public CompilationUnitSyntax GetCompilationUnitRoot(CancellationToken cancellationToken = default(CancellationToken))
{
return (CompilationUnitSyntax)this.GetRoot(cancellationToken);
}
/// <summary>
/// Determines if two trees are the same, disregarding trivia differences.
/// </summary>
/// <param name="tree">The tree to compare against.</param>
/// <param name="topLevel"> If true then the trees are equivalent if the contained nodes and tokens declaring
/// metadata visible symbolic information are equivalent, ignoring any differences of nodes inside method bodies
/// or initializer expressions, otherwise all nodes and tokens must be equivalent.
/// </param>
public override bool IsEquivalentTo(SyntaxTree tree, bool topLevel = false)
{
return SyntaxFactory.AreEquivalent(this, tree, topLevel);
}
#region Factories
/// <summary>
/// Create a new syntax tree from a syntax node.
/// </summary>
public static SyntaxTree Create(CSharpSyntaxNode root, string path = "", CSharpParseOptions options = null)
{
if (root == null) throw new ArgumentNullException("root");
options = options ?? CSharpParseOptions.Default;
return new ParsedSyntaxTree(source: null, path: path, options: options, root: root);
}
/// <summary>
/// Internal helper for <see cref="CSharpSyntaxNode"/> class to create a new syntax tree rooted at the given root node.
/// This method does not create a clone of the given root, but instead preserves its reference identity.
///
/// NOTE: This method is only intended to be used from <see cref="P:CSharpSyntaxNode.SyntaxTree"/> property.
/// NOTE: Do not use this method elsewhere, instead use <see cref="M:SyntaxTree.Create"/> method for creating a syntax tree.
/// </summary>
internal static SyntaxTree CreateWithoutClone(CSharpSyntaxNode root, string path = "")
{
Debug.Assert(root != null);
return new ParsedSyntaxTree(source: null, path: path, options: CSharpParseOptions.Default, root: root, cloneRoot: false);
}
/// <summary>
/// Produces a syntax tree by parsing the source text.
/// </summary>
public static SyntaxTree ParseText(
string text,
string path = "",
CSharpParseOptions options = null,
CancellationToken cancellationToken = default(CancellationToken))
{
return ParseText(SourceText.From(text), path, options, cancellationToken);
}
/// <summary>
/// Produces a syntax tree by parsing the source text.
/// </summary>
public static SyntaxTree ParseText(
SourceText text,
string path = "",
CSharpParseOptions options = null,
CancellationToken cancellationToken = default(CancellationToken))
{
if (text == null)
{
throw new ArgumentNullException("text");
}
if (path == null)
{
throw new ArgumentNullException("path");
}
using (Logger.LogBlock(FunctionId.CSharp_SyntaxTree_FullParse, path, text.Length, cancellationToken))
{
options = options ?? CSharpParseOptions.Default;
using (var lexer = new Syntax.InternalSyntax.Lexer(text, options))
{
using (var parser = new Syntax.InternalSyntax.LanguageParser(lexer, oldTree: null, changes: null, cancellationToken: cancellationToken))
{
var icompilationUnit = parser.ParseCompilationUnit();
var compilationUnit = (CompilationUnitSyntax)icompilationUnit.CreateRed();
var tree = new ParsedSyntaxTree(text, path, options, compilationUnit);
tree.VerifySource();
return tree;
}
}
}
}
/// <summary>
/// Produces a syntax tree by parsing the source file.
/// </summary>
public static SyntaxTree ParseFile(
string path,
CSharpParseOptions options = null,
CancellationToken cancellationToken = default(CancellationToken))
{
if (string.IsNullOrEmpty(path)) throw new ArgumentException("path");
using (var data = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
return ParseText(new EncodedStringText(data, encodingOpt: null), path, options, cancellationToken);
}
}
#endregion
#region Changes
/// <summary>
/// Create a new syntax tree based on this tree using a new source text.
///
/// If the new source text is a minor change from the current source text, an incremental parse will occur,
/// reusing most of the current syntax tree's internal data. Otherwise, a full parse will be done using the new
/// source text.
/// </summary>
public override SyntaxTree WithChangedText(SourceText newText)
{
using (Logger.LogBlock(FunctionId.CSharp_SyntaxTree_IncrementalParse, message: this.FilePath))
{
// try to find the changes between the old text and the new text.
SourceText oldText;
if (this.TryGetText(out oldText))
{
var changes = newText.GetChangeRanges(oldText);
if (changes.Count == 0 && newText == oldText)
{
return this;
}
return this.WithChanges(newText, changes);
}
else
{
// if we do not easily know the old text, then specify entire text as changed so we do a full reparse.
return this.WithChanges(newText, new TextChangeRange[] { new TextChangeRange(new TextSpan(0, this.Length), newText.Length) });
}
}
}
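// Illustrative call pattern for the incremental path above (a sketch; the variable names
// are hypothetical):
//
//   var tree = CSharpSyntaxTree.ParseText(oldText, "file.cs");
//   var newText = oldText.WithChanges(new TextChange(new TextSpan(0, 0), "// header\n"));
//   var newTree = tree.WithChangedText(newText);
//
// Small change sets reuse green nodes from the old tree via the incremental parser; a change
// covering the entire text falls back to a full reparse (see WithChanges below).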
private SyntaxTree WithChanges(SourceText newText, IReadOnlyList<TextChangeRange> changes)
{
if (changes == null)
{
throw new ArgumentNullException("changes");
}
var oldTree = this;
// if changes is entire text do a full reparse
if (changes.Count == 1 && changes[0].Span == new TextSpan(0, this.Length) && changes[0].NewLength == newText.Length)
{
// parser will do a full parse if we give it no changes
changes = null;
oldTree = null;
}
using (var lexer = new InternalSyntax.Lexer(newText, this.Options))
{
CSharp.CSharpSyntaxNode oldRoot = oldTree != null ? oldTree.GetRoot() : null;
using (var parser = new InternalSyntax.LanguageParser(lexer, oldRoot, changes))
{
var compilationUnit = (CompilationUnitSyntax)parser.ParseCompilationUnit().CreateRed();
var tree = new ParsedSyntaxTree(newText, this.FilePath, this.Options, compilationUnit);
tree.VerifySource(changes);
return tree;
}
}
}
/// <summary>
/// Produces a pessimistic list of spans that denote the regions of text in this tree that
/// are changed from the text of the old tree.
/// </summary>
/// <param name="oldTree">The old tree.</param>
/// <remarks>The list is pessimistic because it may claim more or larger regions than actually changed.</remarks>
public override IList<TextSpan> GetChangedSpans(SyntaxTree oldTree)
{
return SyntaxDiffer.GetPossiblyDifferentTextSpans(oldTree, this);
}
/// <summary>
/// Gets a list of text changes that when applied to the old tree produce this tree.
/// </summary>
/// <param name="oldTree">The old tree.</param>
/// <remarks>The list of changes may be different than the original changes that produced this tree.</remarks>
public override IList<TextChange> GetChanges(SyntaxTree oldTree)
{
return SyntaxDiffer.GetTextChanges(oldTree, this);
}
#endregion
#region LinePositions and Locations
/// <summary>
/// Gets the location in terms of path, line and column for a given span.
/// </summary>
/// <param name="span">Span within the tree.</param>
/// <param name="cancellationToken">Cancallation token.</param>
/// <returns>
/// <see cref="FileLinePositionSpan"/> that contains path, line and column information.
/// The values are not affected by line mapping directives (<code>#line</code>).
/// </returns>
public override FileLinePositionSpan GetLineSpan(TextSpan span, CancellationToken cancellationToken = default(CancellationToken))
{
return new FileLinePositionSpan(this.FilePath, GetLinePosition(span.Start), GetLinePosition(span.End));
}
/// <summary>
/// Gets the location in terms of path, line and column after applying source line mapping directives (<code>#line</code>).
/// </summary>
/// <param name="span">Span within the tree.</param>
/// <param name="cancellationToken">Cancallation token.</param>
/// <returns>
/// A valid <see cref="FileLinePositionSpan"/> that contains path, line and column information.
///
/// If the location path is mapped the resulting path is the path specified in the corresponding <code>#line</code>,
/// otherwise it's <see cref="SyntaxTree.FilePath"/>.
///
/// A location path is considered mapped if the first <code>#line</code> directive that precedes it and that
/// either specifies an explicit file path or is <code>#line default</code> exists and specifies an explicit path.
/// </returns>
public override FileLinePositionSpan GetMappedLineSpan(TextSpan span, CancellationToken cancellationToken = default(CancellationToken))
{
var sourceText = this.GetText(cancellationToken);
var unmappedStartPos = sourceText.Lines.GetLinePosition(span.Start);
var unmappedEndPos = sourceText.Lines.GetLinePosition(span.End);
string path = this.FilePath;
int mappedStartLine = unmappedStartPos.Line;
int mappedEndLine = unmappedEndPos.Line;
return new FileLinePositionSpan(path, new LinePositionSpan(
(mappedStartLine == -1)
? new LinePosition(unmappedStartPos.Character)
: new LinePosition(mappedStartLine, unmappedStartPos.Character),
(mappedEndLine == -1)
? new LinePosition(unmappedEndPos.Character)
: new LinePosition(mappedEndLine, unmappedEndPos.Character)), false);
}
//public override LineVisibility GetLineVisibility(int position, CancellationToken cancellationToken = default(CancellationToken))
//{
// return LineVisibility.Visible;
// //if (lazyLineDirectiveMap == null)
// //{
// // // Create the line directive map on demand.
// // Interlocked.CompareExchange(ref lazyLineDirectiveMap, new CSharpLineDirectiveMap(this), null);
// //}
// //return lazyLineDirectiveMap.GetLineVisibility(this.GetText(cancellationToken), position);
//}
///// <summary>
///// Gets a <see cref="FileLinePositionSpan"/> for a <see cref="TextSpan"/>. FileLinePositionSpans are used
///// primarily for diagnostics and source locations.
///// </summary>
///// <param name="span">The source <see cref="TextSpan" /> to convert.</param>
///// <param name="isHiddenPosition">Returns a boolean indicating whether this span is considered hidden or not.</param>
///// <returns>A resulting <see cref="FileLinePositionSpan"/>.</returns>
//internal override FileLinePositionSpan GetMappedLineSpanAndVisibility(TextSpan span, out bool isHiddenPosition)
//{
// if (lazyLineDirectiveMap == null)
// {
// // Create the line directive map on demand.
// Interlocked.CompareExchange(ref lazyLineDirectiveMap, new CSharpLineDirectiveMap(this), null);
// }
// return lazyLineDirectiveMap.TranslateSpanAndVisibility(this.GetText(), this.FilePath, span, out isHiddenPosition);
//}
/// <summary>
/// Are there any hidden regions in the tree?
/// </summary>
/// <returns>True if there is at least one hidden region.</returns>
public override bool HasHiddenRegions()
{
return false;
}
private LinePosition GetLinePosition(int position)
{
return this.GetText().Lines.GetLinePosition(position);
}
/// <summary>
/// Gets a <see cref="Location"/> for the specified text span.
/// </summary>
public override Location GetLocation(TextSpan span)
{
return new SourceLocation(this, span);
}
#endregion
#region Diagnostics
/// <summary>
/// Gets a list of all the diagnostics in the sub tree that has the specified node as its root.
/// This method does not filter diagnostics based on #pragmas and compiler options
/// like nowarn, warnaserror etc.
/// </summary>
public override IEnumerable<Diagnostic> GetDiagnostics(SyntaxNode node)
{
if (node == null)
throw new ArgumentNullException("node");
return GetDiagnostics(node.Green, node.Position);
}
private IEnumerable<Diagnostic> GetDiagnostics(GreenNode greenNode, int position)
{
if (greenNode == null)
throw new InvalidOperationException();
if (greenNode.ContainsDiagnostics)
{
return EnumerateDiagnostics(greenNode, position);
}
return SpecializedCollections.EmptyEnumerable<Diagnostic>();
}
private IEnumerable<Diagnostic> EnumerateDiagnostics(GreenNode node, int position)
{
var enumerator = new SyntaxTreeDiagnosticEnumerator(this, node, position);
while (enumerator.MoveNext())
{
yield return enumerator.Current;
}
}
/// <summary>
/// Gets a list of all the diagnostics associated with the token and any related trivia.
/// This method does not filter diagnostics based on #pragmas and compiler options
/// like nowarn, warnaserror etc.
/// </summary>
public override IEnumerable<Diagnostic> GetDiagnostics(SyntaxToken token)
{
return GetDiagnostics((InternalSyntax.CSharpSyntaxNode)token.Node, token.Position);
}
/// <summary>
/// Gets a list of all the diagnostics associated with the trivia.
/// This method does not filter diagnostics based on #pragmas and compiler options
/// like nowarn, warnaserror etc.
/// </summary>
public override IEnumerable<Diagnostic> GetDiagnostics(SyntaxTrivia trivia)
{
return GetDiagnostics((InternalSyntax.CSharpSyntaxNode)trivia.UnderlyingNode, trivia.Position);
}
/// <summary>
/// Gets a list of all the diagnostics in either the sub tree that has the specified node as its root or
/// associated with the token and its related trivia.
/// This method does not filter diagnostics based on #pragmas and compiler options
/// like nowarn, warnaserror etc.
/// </summary>
public override IEnumerable<Diagnostic> GetDiagnostics(SyntaxNodeOrToken nodeOrToken)
{
return GetDiagnostics((InternalSyntax.CSharpSyntaxNode)nodeOrToken.UnderlyingNode, nodeOrToken.Position);
}
/// <summary>
/// Gets a list of all the diagnostics in the syntax tree.
/// This method does not filter diagnostics based on #pragmas and compiler options
/// like nowarn, warnaserror etc.
/// </summary>
public override IEnumerable<Diagnostic> GetDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
return this.GetDiagnostics(this.GetRoot(cancellationToken));
}
#endregion
#region SyntaxTree
protected override SyntaxNode GetRootCore(CancellationToken cancellationToken)
{
return this.GetRoot(cancellationToken);
}
protected override async Task<SyntaxNode> GetRootAsyncCore(CancellationToken cancellationToken)
{
return await this.GetRootAsync(cancellationToken).ConfigureAwait(false);
}
protected override bool TryGetRootCore(out SyntaxNode root)
{
CSharpSyntaxNode node;
if (this.TryGetRoot(out node))
{
root = node;
return true;
}
else
{
root = null;
return false;
}
}
protected override ParseOptions OptionsCore
{
get
{
return this.Options;
}
}
#endregion
}
}
| |
#region License, Terms and Author(s)
//
// ELMAH - Error Logging Modules and Handlers for ASP.NET
// Copyright (c) 2004-9 Atif Aziz. All rights reserved.
//
// Author(s):
//
// Atif Aziz, http://www.raboof.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
[assembly: Elmah.Scc("$Id$")]
namespace Elmah
{
#region Imports
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Net;
using System.Text;
using System.Web;
using System.Collections.Generic;
using IDictionary = System.Collections.IDictionary;
#endregion
/// <summary>
/// HTTP module implementation that posts tweets (short messages
/// usually limited to 140 characters) about unhandled exceptions in
/// an ASP.NET Web application to a Twitter account.
/// </summary>
/// <remarks>
/// This module requires that the hosting application has permission to
/// send HTTP POST requests to another Internet domain.
/// </remarks>
public class ErrorTweetModule : HttpModuleBase, IExceptionFiltering
{
public event ExceptionFilterEventHandler Filtering;
private ICredentials _credentials;
private string _statusFormat;
private Uri _url;
private int _maxStatusLength;
private string _ellipsis;
private string _formFormat;
private List<WebRequest> _requests;
/// <summary>
/// Initializes the module and prepares it to handle requests.
/// </summary>
protected override void OnInit(HttpApplication application)
{
if (application == null)
throw new ArgumentNullException("application");
//
// Get the configuration section of this module.
// If it's not there then there is nothing to initialize or do.
// In this case, the module is as good as mute.
//
IDictionary config = (IDictionary) GetConfig();
if (config == null)
return;
string userName = GetSetting(config, "userName", string.Empty);
string password = GetSetting(config, "password", string.Empty);
string statusFormat = GetSetting(config, "statusFormat", "{Message}");
int maxStatusLength = int.Parse(GetSetting(config, "maxStatusLength", "140"), NumberStyles.None, CultureInfo.InvariantCulture);
string ellipsis = GetSetting(config, "ellipsis", /* ... */ "\x2026");
string formFormat = GetSetting(config, "formFormat", "status={0}");
Uri url = new Uri(GetSetting(config, "url", "http://twitter.com/statuses/update.xml"), UriKind.Absolute);
_credentials = new NetworkCredential(userName, password);
_statusFormat = statusFormat;
_url = url;
_maxStatusLength = maxStatusLength;
_ellipsis = ellipsis;
_formFormat = formFormat;
_requests = new List<WebRequest>();
application.Error += new EventHandler(OnError);
ErrorSignal.Get(application).Raised += new ErrorSignalEventHandler(OnErrorSignaled);
}
/// <summary>
/// Gets the <see cref="ErrorLog"/> instance to which the module
/// will log exceptions.
/// </summary>
protected virtual ErrorLog GetErrorLog(HttpContext context)
{
return ErrorLog.GetDefault(context);
}
/// <summary>
/// The handler called when an unhandled exception bubbles up to
/// the module.
/// </summary>
protected virtual void OnError(object sender, EventArgs args)
{
HttpApplication application = (HttpApplication) sender;
LogException(application.Server.GetLastError(), application.Context);
}
/// <summary>
/// The handler called when an exception is explicitly signaled.
/// </summary>
protected virtual void OnErrorSignaled(object sender, ErrorSignalEventArgs args)
{
LogException(args.Exception, args.Context);
}
/// <summary>
/// Logs an exception and its context to the error log.
/// </summary>
protected virtual void LogException(Exception e, HttpContext context)
{
if (e == null)
throw new ArgumentNullException("e");
//
// Fire an event to check if listeners want to filter out
// logging of the uncaught exception.
//
ExceptionFilterEventArgs args = new ExceptionFilterEventArgs(e, context);
OnFiltering(args);
if (args.Dismissed)
return;
//
// Tweet away...
//
HttpWebRequest request = null;
try
{
string status = StringFormatter.Format(_statusFormat, new Error(e, context));
//
// Apply ellipsis if status is too long. If the trimmed
// status plus ellipsis yields nothing then just use
// the trimmed status without ellipsis. This can happen if
// someone gives an ellipsis that is ridiculously long.
//
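// For example, with maxStatusLength = 140 and the default one-character
// ellipsis ("\x2026"), a 200-character status is trimmed to 139 characters
// followed by the ellipsis.
//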
int maxLength = _maxStatusLength;
if (status.Length > maxLength)
{
string ellipsis = _ellipsis;
int trimmedStatusLength = maxLength - ellipsis.Length;
status = trimmedStatusLength >= 0
? status.Substring(0, trimmedStatusLength) + ellipsis
: status.Substring(0, maxLength);
}
//
// Submit the status by posting form data as typically done
// by browsers for forms found in HTML.
//
request = (HttpWebRequest) WebRequest.Create(_url);
request.Method = "POST"; // WebRequestMethods.Http.Post;
request.ContentType = "application/x-www-form-urlencoded";
if (_credentials != null) // Need Basic authentication?
{
request.Credentials = _credentials;
request.PreAuthenticate = true;
}
// See http://blogs.msdn.com/shitals/archive/2008/12/27/9254245.aspx
request.ServicePoint.Expect100Continue = false;
//
// URL-encode status into the form and get the bytes to
// determine and set the content length.
//
string encodedForm = string.Format(_formFormat, HttpUtility.UrlEncode(status));
byte[] data = Encoding.ASCII.GetBytes(encodedForm);
Debug.Assert(data.Length > 0);
request.ContentLength = data.Length;
//
// Get the request stream into which the form data is to
// be written. This is done asynchronously to free up this
// thread.
//
// NOTE: We maintain a (possibly paranoid) list of
// outstanding requests and add the request to it so that
// it does not get treated as garbage by GC. In effect,
// we are creating an explicit root. It is also possible
// for this module to get disposed before a request
// completes. During the callback, no other member should
// be touched except the requests list!
//
_requests.Add(request);
IAsyncResult ar = request.BeginGetRequestStream(
new AsyncCallback(OnGetRequestStreamCompleted),
AsyncArgs(request, data));
}
catch (Exception localException)
{
//
// IMPORTANT! We swallow any exception raised during the
// logging and send it out to the trace. The idea
// here is that logging of exceptions by itself should not
// be critical to the overall operation of the application.
// The bad thing is that we catch ANY kind of exception,
// even system ones and potentially let them slip by.
//
OnWebPostError(request, localException);
}
}
private void OnWebPostError(WebRequest request, Exception e)
{
Debug.Assert(e != null);
Trace.WriteLine(e);
if (request != null) _requests.Remove(request);
}
private static object[] AsyncArgs(params object[] args)
{
return args;
}
private void OnGetRequestStreamCompleted(IAsyncResult ar)
{
if (ar == null) throw new ArgumentNullException("ar");
object[] args = (object[]) ar.AsyncState;
OnGetRequestStreamCompleted(ar, (WebRequest) args[0], (byte[]) args[1]);
}
private void OnGetRequestStreamCompleted(IAsyncResult ar, WebRequest request, byte[] data)
{
Debug.Assert(ar != null);
Debug.Assert(request != null);
Debug.Assert(data != null);
Debug.Assert(data.Length > 0);
try
{
using (Stream output = request.EndGetRequestStream(ar))
output.Write(data, 0, data.Length);
request.BeginGetResponse(new AsyncCallback(OnGetResponseCompleted), request);
}
catch (Exception e)
{
OnWebPostError(request, e);
}
}
private void OnGetResponseCompleted(IAsyncResult ar)
{
if (ar == null) throw new ArgumentNullException("ar");
OnGetResponseCompleted(ar, (WebRequest) ar.AsyncState);
}
private void OnGetResponseCompleted(IAsyncResult ar, WebRequest request)
{
Debug.Assert(ar != null);
Debug.Assert(request != null);
try
{
Debug.Assert(request != null);
request.EndGetResponse(ar).Close(); // Not interested; assume OK
_requests.Remove(request);
}
catch (Exception e)
{
OnWebPostError(request, e);
}
}
/// <summary>
/// Raises the <see cref="Filtering"/> event.
/// </summary>
protected virtual void OnFiltering(ExceptionFilterEventArgs args)
{
ExceptionFilterEventHandler handler = Filtering;
if (handler != null)
handler(this, args);
}
/// <summary>
/// Determines whether the module will be registered for discovery
/// in partial trust environments or not.
/// </summary>
protected override bool SupportDiscoverability
{
get { return true; }
}
/// <summary>
/// Gets the configuration object used by <see cref="OnInit"/> to read
/// the settings for module.
/// </summary>
protected virtual object GetConfig()
{
return Configuration.GetSubsection("errorTweet");
}
private static string GetSetting(IDictionary config, string name, string defaultValue)
{
Debug.Assert(config != null);
Debug.AssertStringNotEmpty(name);
string value = ((string)config[name]) ?? string.Empty;
if (value.Length == 0)
{
if (defaultValue == null)
{
throw new ApplicationException(string.Format(
"The required configuration setting '{0}' is missing for the error tweeting module.", name));
}
value = defaultValue;
}
return value;
}
}
}
| |
// Copyright (c) 2010-2014 SharpDX - Alexandre Mutel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
namespace SharpDX.IO
{
/// <summary>
/// Windows File Helper.
/// </summary>
public static class NativeFile
{
#if W8CORE
private const string KERNEL_FILE = "api-ms-win-core-file-l1-2-0.dll";
#else
private const string KERNEL_FILE = "kernel32.dll";
#endif
/// <summary>
/// Checks if the specified file path exists.
/// </summary>
/// <param name="filePath">The file path.</param>
/// <returns><c>true</c> if the specified file path exists, <c>false</c> otherwise</returns>
public static bool Exists(string filePath)
{
try
{
#if !WIN8METRO
var fullPath = Path.GetFullPath(filePath);
#else
var fullPath = filePath;
#endif
WIN32_FILE_ATTRIBUTE_DATA data;
if (GetFileAttributesEx(fullPath, 0, out data))
{
return true;
}
} catch {}
return false;
}
/// <summary>
/// Opens a binary file, reads the contents of the file into a byte array, and then closes the file.
/// </summary>
/// <param name="path">The file to open for reading. </param>
/// <returns>A byte array containing the contents of the file.</returns>
public static byte[] ReadAllBytes(string path)
{
byte[] buffer;
using (var stream = new NativeFileStream(path, NativeFileMode.Open, NativeFileAccess.Read))
{
int offset = 0;
long length = stream.Length;
if (length > 0x7fffffffL)
{
throw new IOException("File too long");
}
int count = (int)length;
buffer = new byte[count];
while (count > 0)
{
int num4 = stream.Read(buffer, offset, count);
if (num4 == 0)
{
throw new EndOfStreamException();
}
offset += num4;
count -= num4;
}
}
return buffer;
}
/// <summary>
/// Opens a text file, reads all lines of the file, and then closes the file.
/// </summary>
/// <param name="path">The file to open for reading. </param>
/// <returns>A string containing all lines of the file.</returns>
public static string ReadAllText(string path)
{
return ReadAllText(path, Encoding.UTF8);
}
/// <summary>
/// Opens a text file, reads all lines of the file, and then closes the file.
/// </summary>
/// <param name="path">The file to open for reading.</param>
/// <param name="encoding">The encoding.</param>
/// <param name="sharing">The sharing.</param>
/// <returns>A string containing all lines of the file.</returns>
public static string ReadAllText(string path, Encoding encoding, NativeFileShare sharing = NativeFileShare.Read)
{
using (var stream = new NativeFileStream(path, NativeFileMode.Open, NativeFileAccess.Read, sharing))
{
using (StreamReader reader = new StreamReader(stream, encoding, true, 0x400))
{
return reader.ReadToEnd();
}
}
}
#if W8CORE
internal struct CREATEFILE2_EXTENDED_PARAMETERS
{
public uint dwSize;
public uint dwFileAttributes;
public uint dwFileFlags;
public uint dwSecurityQosFlags;
public IntPtr lpSecurityAttributes;
public IntPtr hTemplateFile;
};
private enum FILE_INFO_BY_HANDLE_CLASS : int
{
FileBasicInfo = 0,
FileStandardInfo = 1,
FileNameInfo = 2,
FileRenameInfo = 3,
FileDispositionInfo = 4,
FileAllocationInfo = 5,
FileEndOfFileInfo = 6,
FileStreamInfo = 7,
FileCompressionInfo = 8,
FileAttributeTagInfo = 9,
FileIdBothDirectoryInfo = 10, // 0xA
FileIdBothDirectoryRestartInfo = 11, // 0xB
FileIoPriorityHintInfo = 12, // 0xC
FileRemoteProtocolInfo = 13, // 0xD
FileFullDirectoryInfo = 14, // 0xE
FileFullDirectoryRestartInfo = 15, // 0xF
FileStorageInfo = 16, // 0x10
FileAlignmentInfo = 17, // 0x11
MaximumFileInfoByHandlesClass
};
[StructLayout(LayoutKind.Sequential)]
private struct FILE_STANDARD_INFO
{
public long AllocationSize;
public long EndOfFile;
public int NumberOfLinks;
public int DeletePending;
public int Directory;
};
/// <summary>
/// Gets the size of the file.
/// </summary>
/// <param name="handle">The handle.</param>
/// <param name="fileSize">Size of the file.</param>
/// <returns></returns>
/// <unmanaged>GetFileSizeEx</unmanaged>
internal static bool GetFileSizeEx(IntPtr handle, out long fileSize)
{
FILE_STANDARD_INFO info;
unsafe
{
var result = GetFileInformationByHandleEx(handle, FILE_INFO_BY_HANDLE_CLASS.FileStandardInfo, new IntPtr(&info), Utilities.SizeOf<FILE_STANDARD_INFO>());
fileSize = info.EndOfFile;
return result;
}
}
#endif
[StructLayout(LayoutKind.Sequential)]
internal struct FILETIME
{
public uint DateTimeLow;
public uint DateTimeHigh;
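// Combines the two 32-bit halves into the 64-bit FILETIME value
// (100-nanosecond intervals since 1601-01-01 UTC) before converting.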
public DateTime ToDateTime()
{
return DateTime.FromFileTimeUtc((((long)DateTimeHigh) << 32) | ((uint)DateTimeLow));
}
}
[StructLayout(LayoutKind.Sequential)]
internal struct WIN32_FILE_ATTRIBUTE_DATA
{
public uint FileAttributes;
public FILETIME CreationTime;
public FILETIME LastAccessTime;
public FILETIME LastWriteTime;
public uint FileSizeHigh;
public uint FileSizeLow;
}
/// <summary>
/// Gets the last write time for the specified path.
/// </summary>
/// <param name="path">The path.</param>
/// <returns>The last write time access</returns>
public static DateTime GetLastWriteTime(string path)
{
WIN32_FILE_ATTRIBUTE_DATA data;
if (GetFileAttributesEx(path, 0, out data))
{
return data.LastWriteTime.ToDateTime().ToLocalTime();
}
return new DateTime(0);
}
#if WP8
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate bool ReadFileDelegate(IntPtr fileHandle, IntPtr buffer, int numberOfBytesToRead, out int numberOfBytesRead, IntPtr overlapped);
internal static readonly ReadFileDelegate ReadFile;
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate bool FlushFileBuffersDelegate(IntPtr hFile);
internal static readonly FlushFileBuffersDelegate FlushFileBuffers;
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate bool WriteFileDelegate(IntPtr fileHandle, IntPtr buffer, int numberOfBytesToRead, out int numberOfBytesRead, IntPtr overlapped);
internal static readonly WriteFileDelegate WriteFile;
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate bool SetFilePointerExDelegate(IntPtr handle, long distanceToMove, out long distanceToMoveHigh, SeekOrigin seekOrigin);
internal static readonly SetFilePointerExDelegate SetFilePointerEx;
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate bool SetEndOfFileDelegate(IntPtr handle);
internal static readonly SetEndOfFileDelegate SetEndOfFile;
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate IntPtr CreateDelegate([MarshalAs(UnmanagedType.LPWStr)] string fileName, NativeFileAccess desiredAccess, NativeFileShare shareMode, NativeFileMode mode, IntPtr extendedParameters);
internal static readonly CreateDelegate Create;
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
private delegate bool GetFileInformationByHandleExDelegate(IntPtr handle, FILE_INFO_BY_HANDLE_CLASS FileInformationClass, IntPtr lpFileInformation, int dwBufferSize);
private static readonly GetFileInformationByHandleExDelegate GetFileInformationByHandleEx;
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate bool GetFileAttributesExDelegate([MarshalAs(UnmanagedType.LPWStr)] string name, int fileInfoLevel, out WIN32_FILE_ATTRIBUTE_DATA lpFileInformation);
internal static readonly GetFileAttributesExDelegate GetFileAttributesEx;
static NativeFile()
{
// Initialize all the DllImports at once, since we are going to use all of them anyway.
ReadFile = (ReadFileDelegate) Marshal.GetDelegateForFunctionPointer(new IntPtr(SharpDX.WP8.Interop.ReadFile()), typeof (ReadFileDelegate));
FlushFileBuffers = (FlushFileBuffersDelegate)Marshal.GetDelegateForFunctionPointer(new IntPtr(SharpDX.WP8.Interop.FlushFileBuffers()), typeof(FlushFileBuffersDelegate));
WriteFile = (WriteFileDelegate)Marshal.GetDelegateForFunctionPointer(new IntPtr(SharpDX.WP8.Interop.WriteFile()), typeof(WriteFileDelegate));
SetFilePointerEx = (SetFilePointerExDelegate)Marshal.GetDelegateForFunctionPointer(new IntPtr(SharpDX.WP8.Interop.SetFilePointerEx()), typeof(SetFilePointerExDelegate));
SetEndOfFile = (SetEndOfFileDelegate)Marshal.GetDelegateForFunctionPointer(new IntPtr(SharpDX.WP8.Interop.SetEndOfFile()), typeof(SetEndOfFileDelegate));
Create = (CreateDelegate)Marshal.GetDelegateForFunctionPointer(new IntPtr(SharpDX.WP8.Interop.CreateFile2()), typeof(CreateDelegate));
GetFileInformationByHandleEx = (GetFileInformationByHandleExDelegate)Marshal.GetDelegateForFunctionPointer(new IntPtr(SharpDX.WP8.Interop.GetFileInformationByHandleEx()), typeof(GetFileInformationByHandleExDelegate));
GetFileAttributesEx = (GetFileAttributesExDelegate)Marshal.GetDelegateForFunctionPointer(new IntPtr(SharpDX.WP8.Interop.GetFileAttributesExW()), typeof(GetFileAttributesExDelegate));
}
#else
/// <summary>
/// Reads from a file.
/// </summary>
/// <param name="fileHandle">The file handle.</param>
/// <param name="buffer">The buffer.</param>
/// <param name="numberOfBytesToRead">The number of bytes to read.</param>
/// <param name="numberOfBytesRead">The number of bytes read.</param>
/// <param name="overlapped">The overlapped.</param>
/// <returns>A Result</returns>
/// <unmanaged>ReadFile</unmanaged>
[DllImport(KERNEL_FILE, EntryPoint = "ReadFile", SetLastError = true, CharSet = CharSet.Unicode)]
internal static extern bool ReadFile(IntPtr fileHandle, IntPtr buffer, int numberOfBytesToRead, out int numberOfBytesRead, IntPtr overlapped);
[DllImport(KERNEL_FILE, EntryPoint = "FlushFileBuffers", SetLastError = true)]
internal static extern bool FlushFileBuffers(IntPtr hFile);
/// <summary>
/// Writes to a file.
/// </summary>
/// <param name="fileHandle">The file handle.</param>
/// <param name="buffer">The buffer.</param>
/// <param name="numberOfBytesToRead">The number of bytes to read.</param>
/// <param name="numberOfBytesRead">The number of bytes read.</param>
/// <param name="overlapped">The overlapped.</param>
/// <returns>A Result</returns>
/// <unmanaged>WriteFile</unmanaged>
[DllImport(KERNEL_FILE, EntryPoint = "WriteFile", SetLastError = true, CharSet = CharSet.Unicode)]
internal static extern bool WriteFile(IntPtr fileHandle, IntPtr buffer, int numberOfBytesToRead, out int numberOfBytesRead, IntPtr overlapped);
/// <summary>
/// Sets the file pointer.
/// </summary>
/// <param name="handle">The handle.</param>
/// <param name="distanceToMove">The distance to move.</param>
/// <param name="distanceToMoveHigh">The distance to move high.</param>
/// <param name="seekOrigin">The seek origin.</param>
/// <returns></returns>
/// <unmanaged>SetFilePointerEx</unmanaged>
[DllImport(KERNEL_FILE, EntryPoint = "SetFilePointerEx", SetLastError = true, CharSet = CharSet.Unicode)]
internal static extern bool SetFilePointerEx(IntPtr handle, long distanceToMove, out long distanceToMoveHigh, SeekOrigin seekOrigin);
/// <summary>
/// Sets the end of file.
/// </summary>
/// <param name="handle">The handle.</param>
/// <returns></returns>
/// <unmanaged>SetEndOfFile</unmanaged>
[DllImport(KERNEL_FILE, EntryPoint = "SetEndOfFile", SetLastError = true, CharSet = CharSet.Unicode)]
internal static extern bool SetEndOfFile(IntPtr handle);
[DllImport(KERNEL_FILE, EntryPoint = "GetFileAttributesExW", CharSet = CharSet.Unicode, SetLastError = true)]
internal static extern bool GetFileAttributesEx(string name, int fileInfoLevel, out WIN32_FILE_ATTRIBUTE_DATA lpFileInformation);
#if W8CORE
/// <summary>
/// Creates the file.
/// </summary>
/// <param name="fileName">Name of the file.</param>
/// <param name="desiredAccess">The desired access.</param>
/// <param name="shareMode">The share mode.</param>
/// <param name="mode">The creation disposition.</param>
/// <param name="extendedParameters">The extended parameters.</param>
/// <returns>A handle to the created file. IntPtr.Zero if failed.</returns>
/// <unmanaged>CreateFile2</unmanaged>
[DllImport(KERNEL_FILE, EntryPoint = "CreateFile2", SetLastError = true, CharSet = CharSet.Unicode)]
internal static extern IntPtr Create(
string fileName,
NativeFileAccess desiredAccess,
NativeFileShare shareMode,
NativeFileMode mode,
IntPtr extendedParameters);
[DllImport("api-ms-win-core-file-l2-1-0.dll", EntryPoint = "GetFileInformationByHandleEx", SetLastError = true, CharSet = CharSet.Unicode)]
private static extern bool GetFileInformationByHandleEx(IntPtr handle, FILE_INFO_BY_HANDLE_CLASS FileInformationClass, IntPtr lpFileInformation, int dwBufferSize);
#else
/// <summary>
/// Creates the file.
/// </summary>
/// <param name="fileName">Name of the file.</param>
/// <param name="desiredAccess">The desired access.</param>
/// <param name="shareMode">The share mode.</param>
/// <param name="securityAttributes">The security attributes.</param>
/// <param name="mode">The creation disposition.</param>
/// <param name="flagsAndOptions">The flags and attributes.</param>
/// <param name="templateFile">The template file.</param>
/// <returns>A handle to the created file. IntPtr.Zero if failed.</returns>
/// <unmanaged>CreateFile</unmanaged>
[DllImport("kernel32.dll", EntryPoint = "CreateFile", SetLastError = true, CharSet = CharSet.Unicode)]
internal static extern IntPtr Create(
string fileName,
NativeFileAccess desiredAccess,
NativeFileShare shareMode,
IntPtr securityAttributes,
NativeFileMode mode,
NativeFileOptions flagsAndOptions,
IntPtr templateFile);
/// <summary>
/// Gets the size of the file.
/// </summary>
/// <param name="handle">The handle.</param>
/// <param name="fileSize">Size of the file.</param>
/// <returns></returns>
/// <unmanaged>GetFileSizeEx</unmanaged>
[DllImport("kernel32.dll", EntryPoint = "GetFileSizeEx", SetLastError = true, CharSet = CharSet.Unicode)]
internal static extern bool GetFileSizeEx(IntPtr handle, out long fileSize);
#endif
#endif
// END WP8
}
}
| |
using UnityEngine;
using System.Collections.Generic;
using CommonComponent;
namespace GameResource
{
public class ResourceDataManager : ResourceSingleton<ResourceDataManager>
{
public Object Load(ulong pathHash, System.Type type)
{
ResourceData resourceData = _GetResourceData(pathHash);
if (resourceData != null)
{
Log.Assert(resourceData.type == type);
resourceData.AddRef();
return resourceData.asset;
}
Object asset = null;
if (ResourceMainfest.IsAssetBundleResource(pathHash))
{
AssetBundle assetBundle = AssetBundleManager.Instance.Load(pathHash);
string name = ResourceMainfest.GetPathName(pathHash);
if (assetBundle != null && !string.IsNullOrEmpty(name))
{
asset = assetBundle.LoadAsset(name, type);
}
AssetBundleManager.Instance.Unload(pathHash);
}
if (asset == null)
{
string path = ResourceMainfest.GetHashPath(pathHash);
if (!string.IsNullOrEmpty(path))
{
asset = Resources.Load(path, type);
}
}
if (asset != null)
{
resourceData = new ResourceData(pathHash, type, asset);
resourceData.AddRef();
_AddResourceData(resourceData);
}
return asset;
}
public ResourceLoadHandle LoadAsync(ulong pathHash, System.Type type, int priority)
{
ResourceLoadHandle loadHandle = new ResourceLoadHandle();
ResourceData resourceData = _GetResourceData(pathHash);
if (resourceData == null)
{
ResourceLoadRequest request = m_loader.TryGetRequest(pathHash);
if (request == null)
{
request = new ResourceLoadRequest(pathHash, type, priority);
request.OnLoadDone += _OnRequestDone;
m_loader.PushRequst(request);
}
else
{
Log.Assert(request.type == type);
}
request.AddLoadHandle(loadHandle);
}
else
{
Log.Assert(resourceData.type == type);
resourceData.AddRef();
loadHandle.isDone = true;
loadHandle.asset = resourceData.asset;
}
return loadHandle;
}
public void Unload(Object asset)
{
if (asset == null)
return;
ulong pathHash = 0;
if (!m_assetID2PathHash.TryGetValue(asset.GetInstanceID(), out pathHash))
{
Log.ErrorFormat("[ResourceDataManager]Failed to unload unknown asset({0})", asset.name);
return;
}
ResourceData data = _GetResourceData(pathHash);
if (data == null)
{
Log.ErrorFormat("[ResourceDataManager]Failed to get resource data for asset({0})", asset.name);
return;
}
data.Release();
}
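// Illustrative usage sketch (assumes the singleton accessor exposed by
// ResourceSingleton<T> is named Instance, as used elsewhere in this codebase):
//
//   var prefab = (GameObject)ResourceDataManager.Instance.Load(pathHash, typeof(GameObject));
//   // ... use the asset ...
//   ResourceDataManager.Instance.Unload(prefab);        // releases one reference
//   ResourceDataManager.Instance.UnloadUnusedAssets();  // drops zero-ref entries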
public List<ulong> GetResourcePathList()
{
m_tempPathList.Clear();
foreach (var node in m_pathHash2Data)
{
m_tempPathList.Add(node.Key);
}
return m_tempPathList;
}
public void UnloadUnusedAssets()
{
List<ulong> unLoadList = new List<ulong>();
foreach (var itor in m_pathHash2Data)
{
ResourceData resourceData = itor.Value;
if (resourceData == null || resourceData.IsRefZero())
{
unLoadList.Add(itor.Key);
}
}
for (int i = 0; i < unLoadList.Count; ++i)
{
m_pathHash2Data.Remove(unLoadList[i]);
}
}
protected override bool Init()
{
ResourceUpdater.Instance.RegisterUpdater(m_loader.Update);
return base.Init();
}
protected override bool UnInit()
{
ResourceUpdater.Instance.UnRegisterUpdater(m_loader.Update);
m_loader = null;
return base.UnInit();
}
private ResourceData _GetResourceData(ulong pathHash)
{
ResourceData data = null;
if (!m_pathHash2Data.TryGetValue(pathHash, out data))
{
return null;
}
return data;
}
private void _AddResourceData(ResourceData resourceData)
{
ResourceData existedData = null;
if (m_pathHash2Data.TryGetValue(resourceData.Id, out existedData))
{
// LoadAsync first and Load before LoadAsync finished
existedData.MergeRefCount(resourceData);
return;
}
m_pathHash2Data.Add(resourceData.Id, resourceData);
int insId = resourceData.asset.GetInstanceID();
if (m_assetID2PathHash.ContainsKey(insId))
{
m_assetID2PathHash[insId] = resourceData.Id;
}
else
{
m_assetID2PathHash.Add(insId, resourceData.Id);
}
}
private void _OnRequestDone(RequestBase<ResourceData> requestBase)
{
ResourceLoadRequest request = requestBase as ResourceLoadRequest;
if (request == null)
{
Log.Error("[ResourceDataManager]Invalid request.");
return;
}
ResourceData resourceData = request.asset as ResourceData;
if (resourceData == null)
{
Log.Error("[ResourceDataManager]Invalid resource data.");
return;
}
_AddResourceData(resourceData);
}
private List<ulong> m_tempPathList = new List<ulong>();
private Dictionary<int, ulong> m_assetID2PathHash = new Dictionary<int, ulong>();
private Dictionary<ulong, ResourceData> m_pathHash2Data = new Dictionary<ulong, ResourceData>();
private RequestLoader<ResourceLoadRequest, ResourceData> m_loader = new RequestLoader<ResourceLoadRequest, ResourceData>();
}
}
| |
/* Copyright (c) 2006-2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Change history
* Oct 13 2008 Joe Feser [email protected]
* Converted ArrayLists and other .NET 1.1 collections to use Generics
* Combined IExtensionElement and IExtensionElementFactory interfaces
*
*/
#region Using directives
#define USE_TRACING
using System;
using System.Xml;
using System.Net;
using System.IO;
using System.Collections;
using System.Globalization;
using System.ComponentModel;
using System.Runtime.InteropServices;
using Google.GData.Extensions.AppControl;
#endregion
// <summary>Contains AtomEntry, an object to represent the atom:entry
// element.</summary>
namespace Google.GData.Client {
/// <summary>TypeConverter, so that AtomEntry shows up in the property pages
/// </summary>
[ComVisible(false)]
public class AtomEntryConverter : ExpandableObjectConverter {
///<summary>Standard type converter method</summary>
public override bool CanConvertTo(ITypeDescriptorContext context, System.Type destinationType) {
if (destinationType == typeof(AtomEntry))
return true;
return base.CanConvertTo(context, destinationType);
}
///<summary>Standard type converter method</summary>
public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, System.Type destinationType) {
AtomEntry entry = value as AtomEntry;
if (destinationType == typeof(System.String) && entry != null) {
return "Entry: " + entry.Title;
}
return base.ConvertTo(context, culture, value, destinationType);
}
}
/// <summary>AtomEntry object, representing an item in the RSS/Atom feed
/// Version 1.0 removed atom-Head
/// element atom:entry {
/// atomCommonAttributes,
/// (atomAuthor*
/// atomCategory*
/// atomContent?
/// atomContributor*
/// atomId
/// atomLink*
/// atomPublished?
/// atomRights?
/// atomSource?
/// atomSummary?
/// atomTitle
/// atomUpdated
/// extensionElement*)
/// }
/// </summary>
[TypeConverterAttribute(typeof(AtomEntryConverter)), DescriptionAttribute("Expand to see the entry objects for the feed.")]
public class AtomEntry : AtomBase {
#region standard entry properties as returned by query
/// <summary>/feed/entry/title property as string</summary>
private AtomTextConstruct title;
/// <summary>/feed/entry/id property as string</summary>
private AtomId id;
/// <summary>/feed/entry/link collection</summary>
private AtomLinkCollection links;
/// <summary>/feed/entry/updated property as string</summary>
private DateTime lastUpdateDate;
/// <summary>/feed/entry/published property as string</summary>
private DateTime publicationDate;
/// <summary>/feed/entry/author property as Author object</summary>
private AtomPersonCollection authors;
/// <summary>/feed/entry/atomContributor property as Author object</summary>
private AtomPersonCollection contributors;
/// <summary>The "atom:rights" element is a Text construct that conveys a human-readable copyright statement for an entry or feed.</summary>
private AtomTextConstruct rights;
/// <summary>/feed/entry/category/@term property as a list of AtomCategories</summary>
private AtomCategoryCollection categories;
/// <summary>The "atom:summary" element is a Text construct that conveys a short summary, abstract or excerpt of an entry.</summary>
private AtomTextConstruct summary;
/// <summary>contains the content as an object</summary>
private AtomContent content;
/// <summary>atom:source element</summary>
private AtomSource source;
/// <summary>GData service to use</summary>
private IService service;
/// <summary>holds the owning feed</summary>
private AtomFeed feed;
// holds batch information for an entry
private GDataBatchEntryData batchData;
#endregion
#region Persistence overloads
/// <summary>Returns the constant representing this XML element.</summary>
public override string XmlName {
get { return AtomParserNameTable.XmlAtomEntryElement; }
}
/// <summary>checks to see if we are a batch feed, if so, adds the batchNS</summary>
/// <param name="writer">the xmlwriter, where we want to add default namespaces to</param>
protected override void AddOtherNamespaces(XmlWriter writer) {
base.AddOtherNamespaces(writer);
if (this.BatchData != null) {
Utilities.EnsureGDataBatchNamespace(writer);
}
}
/// <summary>checks if this is a namespace
/// declaration that we already added</summary>
/// <param name="node">XmlNode to check</param>
/// <returns>true if this node should be skipped </returns>
protected override bool SkipNode(XmlNode node) {
if (base.SkipNode(node)) {
return true;
}
Tracing.TraceMsg("in skipnode for node: " + node.Name + "--" + node.Value);
if (this.BatchData != null) {
if (node.NodeType == XmlNodeType.Attribute &&
node.Name.StartsWith("xmlns") &&
(String.Compare(node.Value, BaseNameTable.gBatchNamespace) == 0)) {
return true;
}
}
return false;
}
/// <summary>saves the inner state of the element</summary>
/// <param name="writer">the xmlWriter to save into </param>
protected override void SaveInnerXml(XmlWriter writer) {
// saving title
Tracing.TraceMsg("Entering save inner XML on AtomEntry");
if (this.batchData != null) {
this.batchData.Save(writer);
}
if (this.title != null) {
Tracing.TraceMsg("Saving Title: " + this.Title.Text);
this.Title.SaveToXml(writer);
}
if (this.id != null) {
this.Id.SaveToXml(writer);
}
foreach (AtomLink link in this.Links) {
link.SaveToXml(writer);
}
foreach (AtomPerson person in this.Authors) {
person.SaveToXml(writer);
}
foreach (AtomPerson person in this.Contributors) {
person.SaveToXml(writer);
}
foreach (AtomCategory category in this.Categories) {
category.SaveToXml(writer);
}
if (this.rights != null) {
this.Rights.SaveToXml(writer);
}
if (this.summary != null) {
this.Summary.SaveToXml(writer);
}
if (this.content != null) {
this.Content.SaveToXml(writer);
}
if (this.source != null) {
this.Source.SaveToXml(writer);
}
WriteLocalDateTimeElement(writer, AtomParserNameTable.XmlUpdatedElement, this.Updated);
WriteLocalDateTimeElement(writer, AtomParserNameTable.XmlPublishedElement, this.Published);
}
#endregion
/// <summary>
/// default AtomEntry constructor. Adds the AppControl element
/// as a default extension
/// </summary>
public AtomEntry() {
this.AddExtension(new AppControl());
}
/// <summary>Read only accessor for feed</summary>
public AtomFeed Feed {
get { return this.feed; }
}
/// <summary>internal method to set the feed</summary>
internal void setFeed(AtomFeed feed) {
if (feed != null) {
this.Dirty = true;
this.Service = feed.Service;
}
this.feed = feed;
}
/// <summary>helper method to create a new, decoupled entry based on a feedEntry</summary>
/// <param name="entryToImport">the entry from a feed that you want to put somewhere else</param>
/// <returns> the new entry ready to be inserted</returns>
public static AtomEntry ImportFromFeed(AtomEntry entryToImport) {
Tracing.Assert(entryToImport != null, "entryToImport should not be null");
if (entryToImport == null) {
throw new ArgumentNullException("entryToImport");
}
AtomEntry entry = null;
entry = (AtomEntry)Activator.CreateInstance(entryToImport.GetType());
entry.CopyEntry(entryToImport);
entry.Id = null;
// if the source is empty, set the source to the old feed
if (entry.Source == null) {
entry.Source = entryToImport.Feed;
}
Tracing.TraceInfo("Imported entry: " + entryToImport.Title.Text + " to: " + entry.Title.Text);
return entry;
}
/// <summary>accessor method for the GData Service to use</summary>
public IService Service {
get { return this.service; }
set { this.Dirty = true; this.service = value; }
}
/// <summary>accessor to the batchdata for the entry</summary>
/// <returns>GDataBatch object</returns>
public GDataBatchEntryData BatchData {
get { return this.batchData; }
set { this.batchData = value; }
}
/// <summary>accessor method public Uri EditUri</summary>
/// <returns> </returns>
public AtomUri EditUri {
get {
AtomLink link = this.Links.FindService(BaseNameTable.ServiceEdit, AtomLink.ATOM_TYPE);
// scan the link collection
return link == null ? null : link.HRef;
}
set {
AtomLink link = this.Links.FindService(BaseNameTable.ServiceEdit, AtomLink.ATOM_TYPE);
if (link == null) {
link = new AtomLink(AtomLink.ATOM_TYPE, BaseNameTable.ServiceEdit);
this.Links.Add(link);
}
link.HRef = value;
}
}
/// <summary>accessor for the self URI</summary>
/// <returns> </returns>
public AtomUri SelfUri {
get {
AtomLink link = this.Links.FindService(BaseNameTable.ServiceSelf, AtomLink.ATOM_TYPE);
// scan the link collection
return link == null ? null : link.HRef;
}
set {
AtomLink link = this.Links.FindService(BaseNameTable.ServiceSelf, AtomLink.ATOM_TYPE);
if (link == null) {
link = new AtomLink(AtomLink.ATOM_TYPE, BaseNameTable.ServiceSelf);
this.Links.Add(link);
}
link.HRef = value;
}
}
/// <summary>accessor to find the edit-media link</summary>
/// <returns>the Uri as AtomUri to the media upload Service</returns>
public AtomUri MediaUri {
get {
// scan the link collection
AtomLink link = this.Links.FindService(BaseNameTable.ServiceMedia, null);
return link == null ? null : link.HRef;
}
set {
AtomLink link = this.Links.FindService(BaseNameTable.ServiceMedia, null);
if (link == null) {
link = new AtomLink(null, BaseNameTable.ServiceMedia);
this.Links.Add(link);
}
link.HRef = value;
}
}
/// <summary>accessor to find the alternate link, in HTML only
/// The method scans the link collection for a link that is of type rel=alternate
/// and has a media type of HTML, otherwise it returns NULL. The same is true for setting this.
/// If you need to use a rel/alternate with a different media type, you need
/// to use the links collection directly</summary>
/// <returns>the Uri as AtomUri to HTML representation</returns>
public AtomUri AlternateUri {
get {
// scan the link collection
AtomLink link = this.Links.FindService(BaseNameTable.ServiceAlternate, AtomLink.HTML_TYPE);
return link == null ? null : link.HRef;
}
set {
AtomLink link = this.Links.FindService(BaseNameTable.ServiceAlternate, AtomLink.HTML_TYPE);
if (link == null) {
link = new AtomLink(AtomLink.HTML_TYPE, BaseNameTable.ServiceAlternate);
this.Links.Add(link);
}
link.HRef = value;
}
}
/// <summary>accessor method public string Feed</summary>
/// <returns>returns the Uri as string for the feed service </returns>
public string FeedUri {
get {
AtomLink link = this.Links.FindService(BaseNameTable.ServiceFeed, AtomLink.ATOM_TYPE);
// scan the link collection
return link == null ? null : Utilities.CalculateUri(this.Base, this.ImpliedBase, link.HRef.ToString());
}
set {
AtomLink link = this.Links.FindService(BaseNameTable.ServiceFeed, AtomLink.ATOM_TYPE);
if (link == null) {
link = new AtomLink(AtomLink.ATOM_TYPE, BaseNameTable.ServiceFeed);
this.Links.Add(link);
}
link.HRef = new AtomUri(value);
}
}
/// <summary>accessor method public DateTime UpdateDate</summary>
/// <returns> </returns>
public DateTime Updated {
get { return this.lastUpdateDate; }
set { this.Dirty = true; this.lastUpdateDate = value; }
}
/// <summary>accessor method public DateTime PublicationDate</summary>
/// <returns> </returns>
public DateTime Published {
get { return this.publicationDate; }
set { this.Dirty = true; this.publicationDate = value; }
}
/// <summary>
/// returns the app:control element
/// </summary>
/// <returns></returns>
public AppControl AppControl {
get {
return FindExtension(BaseNameTable.XmlElementPubControl,
BaseNameTable.AppPublishingNamespace(this)) as AppControl;
}
set {
ReplaceExtension(BaseNameTable.XmlElementPubControl,
BaseNameTable.AppPublishingNamespace(this),
value);
}
}
/// <summary>specifies if app:control/app:draft is yes or no.
/// this is determined by walking the extension elements collection</summary>
/// <returns>true if this is a draft element</returns>
public bool IsDraft {
get {
if (this.AppControl != null && this.AppControl.Draft != null) {
return this.AppControl.Draft.BooleanValue;
}
return false;
}
set {
this.Dirty = true;
if (this.AppControl == null) {
this.AppControl = new AppControl();
}
if (this.AppControl.Draft == null) {
this.AppControl.Draft = new AppDraft();
}
this.AppControl.Draft.BooleanValue = value;
}
}
/// <summary>accessor method public Contributors AtomPersonCollection</summary>
/// <returns> </returns>
public AtomPersonCollection Authors {
get {
if (this.authors == null) {
this.authors = new AtomPersonCollection();
}
return this.authors;
}
}
/// <summary>accessor method public Contributors AtomPersonCollection</summary>
/// <returns> </returns>
public AtomPersonCollection Contributors {
get {
if (this.contributors == null) {
this.contributors = new AtomPersonCollection();
}
return this.contributors;
}
}
/// <summary>accessor method public string Content</summary>
/// <returns> </returns>
public AtomContent Content {
get {
if (this.content == null) {
this.content = new AtomContent();
}
return this.content;
}
set { this.Dirty = true; this.content = value; }
}
/// <summary>accessor method public string Summary</summary>
/// <returns> </returns>
public AtomTextConstruct Summary {
get {
if (this.summary == null) {
this.summary = new AtomTextConstruct(AtomTextConstructElementType.Summary);
}
return this.summary;
}
set { this.Dirty = true; this.summary = value; }
}
/// <summary>accessor method public Links AtomLinkCollection</summary>
/// <returns> </returns>
public AtomLinkCollection Links {
get {
if (this.links == null) {
this.links = new AtomLinkCollection();
}
return this.links;
}
}
/// <summary>holds an array of AtomCategory objects</summary>
/// <returns> </returns>
public AtomCategoryCollection Categories {
get {
if (this.categories == null) {
this.categories = new AtomCategoryCollection();
}
return this.categories;
}
}
/// <summary>accessor method public AtomId Id</summary>
/// <returns> </returns>
public AtomId Id {
get {
if (this.id == null) {
this.id = new AtomId();
}
return this.id;
}
set { this.Dirty = true; this.id = value; }
}
/// <summary>accessor method public AtomTextConstruct Title</summary>
/// <returns> </returns>
public AtomTextConstruct Title {
get {
if (this.title == null) {
this.title = new AtomTextConstruct(AtomTextConstructElementType.Title);
}
return this.title;
}
set { this.Dirty = true; this.title = value; }
}
/// <summary>if the entry was copied, represents the source</summary>
/// <returns> </returns>
public AtomSource Source {
get { return this.source; }
set {
this.Dirty = true;
AtomFeed feed = value as AtomFeed;
if (feed != null) {
Tracing.TraceInfo("need to copy a feed to a source");
this.source = new AtomSource(feed);
} else {
this.source = value;
}
}
}
/// <summary>accessor method public string rights</summary>
/// <returns> </returns>
public AtomTextConstruct Rights {
get {
if (this.rights == null) {
this.rights = new AtomTextConstruct(AtomTextConstructElementType.Rights);
}
return this.rights;
}
set { this.Dirty = true; this.rights = value; }
}
#region EDITING
/// <summary>returns whether or not the entry is read-only</summary>
public bool ReadOnly {
get {
return this.EditUri == null;
}
}
/// <summary>commits the item to the server</summary>
/// <returns>throws an exception if an error occured updating, returns
/// the updated entry from the service</returns>
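/// <example>
/// Illustrative sketch:
/// <code>
/// entry.Title.Text = "Updated title";
/// AtomEntry refreshed = entry.Update();   // round-trips through the configured Service
/// </code>
/// </example>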
public AtomEntry Update() {
if (this.Service == null) {
throw new InvalidOperationException("No Service object set");
}
AtomEntry updatedEntry = Service.Update(this);
if (updatedEntry != null) {
this.CopyEntry(updatedEntry);
this.MarkElementDirty(false);
return updatedEntry;
}
return null;
}
/// <summary>deletes the item from the server</summary>
/// <returns>throws an exception if an error occured updating</returns>
public void Delete() {
if (this.Service == null) {
throw new InvalidOperationException("No Service object set");
}
Service.Delete(this);
}
/// <summary>takes the updated entry returned and sets the properties to this object</summary>
/// <param name="updatedEntry"> </param>
protected void CopyEntry(AtomEntry updatedEntry) {
Tracing.Assert(updatedEntry != null, "updatedEntry should not be null");
if (updatedEntry == null) {
throw new ArgumentNullException("updatedEntry");
}
this.title = updatedEntry.Title;
this.authors = updatedEntry.Authors;
this.id = updatedEntry.Id;
this.links = updatedEntry.Links;
this.lastUpdateDate = updatedEntry.Updated;
this.publicationDate = updatedEntry.Published;
this.authors = updatedEntry.Authors;
this.rights = updatedEntry.Rights;
this.categories = updatedEntry.Categories;
this.summary = updatedEntry.Summary;
this.content = updatedEntry.Content;
this.source = updatedEntry.Source;
this.ExtensionElements.Clear();
foreach (IExtensionElementFactory extension in updatedEntry.ExtensionElements) {
this.ExtensionElements.Add(extension);
}
}
#endregion
/// <summary>
/// this is the subclassing method for AtomBase derived
/// classes to overload what childelements should be created
/// needed to create CustomLink type objects, like WebContentLink etc
/// </summary>
/// <param name="reader">The XmlReader that tells us what we are working with</param>
/// <param name="parser">the parser is primarily used for nametable comparisons</param>
/// <returns>AtomBase</returns>
public override AtomBase CreateAtomSubElement(XmlReader reader, AtomFeedParser parser) {
if (reader == null) {
throw new ArgumentNullException("reader");
}
if (parser == null) {
throw new ArgumentNullException("parser");
}
Object localname = reader.LocalName;
if (localname.Equals(parser.Nametable.Source)) {
return new AtomSource();
} else if (localname.Equals(parser.Nametable.Content)) {
return new AtomContent();
}
return base.CreateAtomSubElement(reader, parser);
}
#region overloaded for property changes, xml:base
/// <summary>just go down the child collections</summary>
/// <param name="uriBase"> as currently calculated</param>
internal override void BaseUriChanged(AtomUri uriBase) {
base.BaseUriChanged(uriBase);
// now pass it to the properties.
uriBase = new AtomUri(Utilities.CalculateUri(this.Base, uriBase, null));
if (this.Title != null) {
this.Title.BaseUriChanged(uriBase);
}
if (this.Id != null) {
this.Id.BaseUriChanged(uriBase);
}
foreach (AtomLink link in this.Links) {
link.BaseUriChanged(uriBase);
}
foreach (AtomPerson person in this.Authors) {
person.BaseUriChanged(uriBase);
}
foreach (AtomPerson person in this.Contributors) {
person.BaseUriChanged(uriBase);
}
foreach (AtomCategory category in this.Categories) {
category.BaseUriChanged(uriBase);
}
if (this.Rights != null) {
this.Rights.BaseUriChanged(uriBase);
}
if (this.Summary != null) {
this.Summary.BaseUriChanged(uriBase);
}
if (this.Content != null) {
this.Content.BaseUriChanged(uriBase);
}
if (this.Source != null) {
this.Source.BaseUriChanged(uriBase);
}
}
/// <summary>calls the action on this object and all children</summary>
/// <param name="action">an IAtomBaseAction interface to call </param>
/// <returns>true or false, pending outcome</returns>
public override bool WalkTree(IBaseWalkerAction action) {
if (base.WalkTree(action)) {
return true;
}
foreach (AtomPerson person in this.Authors) {
if (person.WalkTree(action)) {
return true;
}
}
// saving Contributors
foreach (AtomPerson person in this.Contributors) {
if (person.WalkTree(action)) {
return true;
}
}
// saving Categories
foreach (AtomCategory category in this.Categories) {
if (category.WalkTree(action)) {
return true;
}
}
if (this.id != null) {
if (this.id.WalkTree(action)) {
return true;
}
}
// save the Links
foreach (AtomLink link in this.Links) {
if (link.WalkTree(action)) {
return true;
}
}
if (this.rights != null) {
if (this.rights.WalkTree(action)) {
return true;
}
}
if (this.title != null) {
if (this.title.WalkTree(action)) {
return true;
}
}
if (this.summary != null) {
if (this.summary.WalkTree(action)) {
return true;
}
}
if (this.content != null) {
if (this.content.WalkTree(action)) {
return true;
}
}
if (this.source != null) {
if (this.source.WalkTree(action)) {
return true;
}
}
// nothing dirty at all
return false;
}
#endregion
/// <summary>
/// Parses the inner state of the element
/// </summary>
/// <param name="e">The extension element that should be added to this entry</param>
/// <param name="parser">The AtomFeedParser that called this</param>
public virtual void Parse(ExtensionElementEventArgs e, AtomFeedParser parser) {
if (e == null) {
throw new ArgumentNullException("e");
}
Tracing.TraceMsg("Entering Parse on AbstractEntry");
XmlNode node = e.ExtensionElement;
if (this.ExtensionFactories != null && this.ExtensionFactories.Count > 0) {
Tracing.TraceMsg("Entring default Parsing for AbstractEntry");
IExtensionElementFactory f = FindExtensionFactory(node.LocalName,
node.NamespaceURI);
if (f != null) {
this.ExtensionElements.Add(f.CreateInstance(node, parser));
e.DiscardEntry = true;
}
}
}
}
}
| |
/* Copyright (C) 2008-2018 Peter Palotas, Jeffrey Jangli, Alexandr Normuradov
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
using System;
using System.IO;
using System.Security;
namespace Alphaleonis.Win32.Filesystem
{
public static partial class File
{
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Transaction = transaction,
MoveOptions = MoveOptions.CopyAllowed
}, false, false, sourcePath, destinationPath, null);
}
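// Illustrative usage sketch (assumes the caller creates and owns the kernel transaction; paths and
// names are examples only):
//
//   using (var kt = new KernelTransaction())
//   {
//      CopyMoveResult result = File.MoveTransacted(kt, @"C:\temp\report.txt", @"D:\archive\report.txt");
//      kt.Commit();
//   }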
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, PathFormat pathFormat)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Transaction = transaction,
MoveOptions = MoveOptions.CopyAllowed,
PathFormat = pathFormat
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="retry">The number of retries on failed copies.</param>
/// <param name="retryTimeout">The wait time in seconds between retries.</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, int retry, int retryTimeout)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Retry = retry,
RetryTimeout = retryTimeout,
Transaction = transaction,
MoveOptions = MoveOptions.CopyAllowed
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="retry">The number of retries on failed copies.</param>
/// <param name="retryTimeout">The wait time in seconds between retries.</param>
/// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, int retry, int retryTimeout, PathFormat pathFormat)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Retry = retry,
RetryTimeout = retryTimeout,
Transaction = transaction,
MoveOptions = MoveOptions.CopyAllowed,
PathFormat = pathFormat
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="progressHandler">A callback function that is called each time another portion of the file has been moved. This parameter can be <c>null</c>.</param>
/// <param name="userProgressData">The argument to be passed to the callback function. This parameter can be <c>null</c>.</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, CopyMoveProgressRoutine progressHandler, object userProgressData)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Transaction = transaction,
MoveOptions = MoveOptions.CopyAllowed,
ProgressHandler = progressHandler,
UserProgressData = userProgressData
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="progressHandler">A callback function that is called each time another portion of the file has been moved. This parameter can be <c>null</c>.</param>
/// <param name="userProgressData">The argument to be passed to the callback function. This parameter can be <c>null</c>.</param>
/// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, CopyMoveProgressRoutine progressHandler, object userProgressData, PathFormat pathFormat)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Transaction = transaction,
MoveOptions = MoveOptions.CopyAllowed,
ProgressHandler = progressHandler,
UserProgressData = userProgressData,
PathFormat = pathFormat
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="retry">The number of retries on failed copies.</param>
/// <param name="retryTimeout">The wait time in seconds between retries.</param>
/// <param name="progressHandler">A callback function that is called each time another portion of the file has been moved. This parameter can be <c>null</c>.</param>
/// <param name="userProgressData">The argument to be passed to the callback function. This parameter can be <c>null</c>.</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, int retry, int retryTimeout, CopyMoveProgressRoutine progressHandler, object userProgressData)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Retry = retry,
RetryTimeout = retryTimeout,
Transaction = transaction,
MoveOptions = MoveOptions.CopyAllowed,
ProgressHandler = progressHandler,
UserProgressData = userProgressData
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="progressHandler">A callback function that is called each time another portion of the file has been moved. This parameter can be <c>null</c>.</param>
/// <param name="userProgressData">The argument to be passed to the callback function. This parameter can be <c>null</c>.</param>
/// <param name="retry">The number of retries on failed copies.</param>
/// <param name="retryTimeout">The wait time in seconds between retries.</param>
/// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, int retry, int retryTimeout, CopyMoveProgressRoutine progressHandler, object userProgressData, PathFormat pathFormat)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Retry = retry,
RetryTimeout = retryTimeout,
Transaction = transaction,
MoveOptions = MoveOptions.CopyAllowed,
ProgressHandler = progressHandler,
UserProgressData = userProgressData,
PathFormat = pathFormat
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="moveOptions"><see cref="MoveOptions"/> that specify how the file is to be moved. This parameter can be <c>null</c>.</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, MoveOptions moveOptions)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Transaction = transaction,
MoveOptions = moveOptions
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="moveOptions"><see cref="MoveOptions"/> that specify how the file is to be moved. This parameter can be <c>null</c>.</param>
/// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, MoveOptions moveOptions, PathFormat pathFormat)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Transaction = transaction,
MoveOptions = moveOptions,
PathFormat = pathFormat
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="moveOptions"><see cref="MoveOptions"/> that specify how the file is to be moved. This parameter can be <c>null</c>.</param>
/// <param name="retry">The number of retries on failed copies.</param>
/// <param name="retryTimeout">The wait time in seconds between retries.</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, MoveOptions moveOptions, int retry, int retryTimeout)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Retry = retry,
RetryTimeout = retryTimeout,
Transaction = transaction,
MoveOptions = moveOptions
}, false, false, sourcePath, destinationPath, null);
}
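// Illustrative sketch for the MoveOptions/retry overloads (flag names shown for illustration;
// MoveOptions.CopyAllowed permits a copy-and-delete fallback across volumes and
// MoveOptions.ReplaceExisting overwrites an existing destination file):
//
//   File.MoveTransacted(kt, sourcePath, destinationPath,
//      MoveOptions.CopyAllowed | MoveOptions.ReplaceExisting,
//      retry: 3, retryTimeout: 10);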
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="moveOptions"><see cref="MoveOptions"/> that specify how the file is to be moved. This parameter can be <c>null</c>.</param>
/// <param name="retry">The number of retries on failed copies.</param>
/// <param name="retryTimeout">The wait time in seconds between retries.</param>
/// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, MoveOptions moveOptions, int retry, int retryTimeout, PathFormat pathFormat)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Retry = retry,
RetryTimeout = retryTimeout,
Transaction = transaction,
MoveOptions = moveOptions,
PathFormat = pathFormat
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="moveOptions"><see cref="MoveOptions"/> that specify how the file is to be moved. This parameter can be <c>null</c>.</param>
/// <param name="progressHandler">A callback function that is called each time another portion of the file has been moved. This parameter can be <c>null</c>.</param>
/// <param name="userProgressData">The argument to be passed to the callback function. This parameter can be <c>null</c>.</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, MoveOptions moveOptions, CopyMoveProgressRoutine progressHandler, object userProgressData)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Transaction = transaction,
MoveOptions = moveOptions,
ProgressHandler = progressHandler,
UserProgressData = userProgressData
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="moveOptions"><see cref="MoveOptions"/> that specify how the file is to be moved. This parameter can be <c>null</c>.</param>
/// <param name="progressHandler">A callback function that is called each time another portion of the file has been moved. This parameter can be <c>null</c>.</param>
/// <param name="userProgressData">The argument to be passed to the callback function. This parameter can be <c>null</c>.</param>
/// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, MoveOptions moveOptions, CopyMoveProgressRoutine progressHandler, object userProgressData, PathFormat pathFormat)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Transaction = transaction,
MoveOptions = moveOptions,
ProgressHandler = progressHandler,
UserProgressData = userProgressData,
PathFormat = pathFormat
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="moveOptions"><see cref="MoveOptions"/> that specify how the file is to be moved. This parameter can be <c>null</c>.</param>
/// <param name="retry">The number of retries on failed copies.</param>
/// <param name="retryTimeout">The wait time in seconds between retries.</param>
/// <param name="progressHandler">A callback function that is called each time another portion of the file has been moved. This parameter can be <c>null</c>.</param>
/// <param name="userProgressData">The argument to be passed to the callback function. This parameter can be <c>null</c>.</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, MoveOptions moveOptions, int retry, int retryTimeout, CopyMoveProgressRoutine progressHandler, object userProgressData)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Retry = retry,
RetryTimeout = retryTimeout,
Transaction = transaction,
MoveOptions = moveOptions,
ProgressHandler = progressHandler,
UserProgressData = userProgressData
}, false, false, sourcePath, destinationPath, null);
}
/// <summary>[AlphaFS] Moves a specified file to a new location, providing the option to specify a new file name.</summary>
/// <returns>A <see cref="CopyMoveResult"/> class with the status of the Move action.</returns>
/// <remarks>
/// <para>This method works across disk volumes.</para>
/// <para>Note that if you attempt to replace a file by moving a file of the same name into that directory, you get an <see cref="IOException"/>.</para>
/// <para>You cannot use the Move method to overwrite an existing file.</para>
/// <para>Whenever possible, avoid using short file names (such as <c>XXXXXX~1.XXX</c>) with this method.</para>
/// <para>If two files have equivalent short file names then this method may fail and raise an exception and/or result in undesirable behavior.</para>
/// </remarks>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="DirectoryNotFoundException"/>
/// <exception cref="FileNotFoundException"/>
/// <exception cref="IOException"/>
/// <exception cref="NotSupportedException"/>
/// <exception cref="UnauthorizedAccessException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="sourcePath">The name of the file to move.</param>
/// <param name="destinationPath">The new path for the file.</param>
/// <param name="moveOptions"><see cref="MoveOptions"/> that specify how the file is to be moved. This parameter can be <c>null</c>.</param>
/// <param name="retry">The number of retries on failed copies.</param>
/// <param name="retryTimeout">The wait time in seconds between retries.</param>
/// <param name="progressHandler">A callback function that is called each time another portion of the file has been moved. This parameter can be <c>null</c>.</param>
/// <param name="userProgressData">The argument to be passed to the callback function. This parameter can be <c>null</c>.</param>
/// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
[SecurityCritical]
public static CopyMoveResult MoveTransacted(KernelTransaction transaction, string sourcePath, string destinationPath, MoveOptions moveOptions, int retry, int retryTimeout, CopyMoveProgressRoutine progressHandler, object userProgressData, PathFormat pathFormat)
{
return CopyMoveCore(false, new CopyMoveArguments
{
Retry = retry,
RetryTimeout = retryTimeout,
Transaction = transaction,
MoveOptions = moveOptions,
ProgressHandler = progressHandler,
UserProgressData = userProgressData,
PathFormat = pathFormat
}, false, false, sourcePath, destinationPath, null);
}
}
}
| |
using System;
using System.Globalization;
/// <summary>
/// String.System.IConvertible.ToUInt64(IFormatProvider provider)
/// This method supports the .NET Framework infrastructure and is
/// not intended to be used directly from your code.
/// Converts the value of the current String object to a 64-bit unsigned integer.
/// </summary>
class IConvertibleToUInt64
{
private const int c_MIN_STRING_LEN = 8;
private const int c_MAX_STRING_LEN = 256;
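// Behavior under test, in short: ((IConvertible)"123").ToUInt64(null) returns 123UL, a non-numeric
// string throws FormatException, and an out-of-range value throws OverflowException.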
public static int Main()
{
IConvertibleToUInt64 iege = new IConvertibleToUInt64();
TestLibrary.TestFramework.BeginTestCase("for method: String.System.IConvertible.ToUInt64(IFormatProvider)");
if (iege.RunTests())
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("PASS");
return 100;
}
else
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("FAIL");
return 0;
}
}
public bool RunTests()
{
bool retVal = true;
TestLibrary.TestFramework.LogInformation("[Positive]");
retVal = PosTest1() && retVal;
retVal = PosTest2() && retVal;
retVal = PosTest3() && retVal;
retVal = PosTest4() && retVal;
TestLibrary.TestFramework.LogInformation("[Negative]");
retVal = NegTest1() && retVal;
retVal = NegTest2() && retVal;
retVal = NegTest3() && retVal;
return retVal;
}
#region Positive test scenarios
public bool PosTest1()
{
bool retVal = true;
const string c_TEST_DESC = "PosTest1: Random numeric string";
const string c_TEST_ID = "P001";
string strSrc;
IFormatProvider provider;
UInt64 i;
bool expectedValue = true;
bool actualValue = false;
i = GetUInt64();
strSrc = i.ToString();
provider = null;
TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
try
{
actualValue = (i == ((IConvertible)strSrc).ToUInt64(provider));
if (actualValue != expectedValue)
{
string errorDesc = "Value is not " + expectedValue + " as expected: Actual(" + actualValue + ")";
errorDesc += GetDataString(strSrc, provider);
TestLibrary.TestFramework.LogError("001" + " TestId-" + c_TEST_ID, errorDesc);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("002" + " TestId-" + c_TEST_ID, "Unexpected exception: " + e + GetDataString(strSrc, provider));
retVal = false;
}
return retVal;
}
public bool PosTest2()
{
bool retVal = true;
const string c_TEST_DESC = "PosTest2: Positive sign";
const string c_TEST_ID = "P002";
string strSrc;
IFormatProvider provider;
NumberFormatInfo ni = new NumberFormatInfo();
UInt64 i;
bool expectedValue = true;
bool actualValue = false;
i = GetUInt64();
ni.PositiveSign = TestLibrary.Generator.GetString(-55, false, false, c_MIN_STRING_LEN, c_MAX_STRING_LEN);
strSrc = ni.PositiveSign + i.ToString();
provider = (IFormatProvider)ni;
TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
try
{
actualValue = (i == ((IConvertible)strSrc).ToUInt64(provider));
if (actualValue != expectedValue)
{
string errorDesc = "Value is not " + expectedValue + " as expected: Actual(" + actualValue + ")";
errorDesc += GetDataString(strSrc, provider);
TestLibrary.TestFramework.LogError("003" + " TestId-" + c_TEST_ID, errorDesc);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("004" + " TestId-" + c_TEST_ID, "Unexpected exception: " + e + GetDataString(strSrc, provider));
retVal = false;
}
return retVal;
}
public bool PosTest3()
{
bool retVal = true;
const string c_TEST_DESC = "PosTest3: string is Int64.MaxValue";
const string c_TEST_ID = "P003";
string strSrc;
IFormatProvider provider;
bool expectedValue = true;
bool actualValue = false;
strSrc = Int64.MaxValue.ToString();
provider = null;
TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
try
{
actualValue = (Int64.MaxValue == ((IConvertible)strSrc).ToUInt64(provider));
if (actualValue != expectedValue)
{
string errorDesc = "Value is not " + expectedValue + " as expected: Actual(" + actualValue + ")";
errorDesc += GetDataString(strSrc, provider);
TestLibrary.TestFramework.LogError("005" + " TestId-" + c_TEST_ID, errorDesc);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("006" + " TestId-" + c_TEST_ID, "Unexpected exception: " + e + GetDataString(strSrc, provider));
retVal = false;
}
return retVal;
}
public bool PosTest4()
{
bool retVal = true;
const string c_TEST_DESC = "PosTest4: string is UInt32.MinValue";
const string c_TEST_ID = "P004";
string strSrc;
IFormatProvider provider;
bool expectedValue = true;
bool actualValue = false;
strSrc = UInt64.MinValue.ToString();
provider = null;
TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
try
{
actualValue = (UInt64.MinValue == ((IConvertible)strSrc).ToUInt64(provider));
if (actualValue != expectedValue)
{
string errorDesc = "Value is not " + expectedValue + " as expected: Actual(" + actualValue + ")";
errorDesc += GetDataString(strSrc, provider);
TestLibrary.TestFramework.LogError("007" + " TestId-" + c_TEST_ID, errorDesc);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("008" + " TestId-" + c_TEST_ID, "Unexpected exception: " + e + GetDataString(strSrc, provider));
retVal = false;
}
return retVal;
}
#endregion // end for positive test scenarios
#region Negative test scenarios
//FormatException
public bool NegTest1()
{
bool retVal = true;
const string c_TEST_DESC = "NegTest1: The value of String object cannot be parsed";
const string c_TEST_ID = "N001";
string strSrc;
IFormatProvider provider;
strSrc = "p" + TestLibrary.Generator.GetString(-55, false, 9, c_MAX_STRING_LEN);
provider = null;
TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
try
{
((IConvertible)strSrc).ToUInt64(provider);
TestLibrary.TestFramework.LogError("009" + "TestId-" + c_TEST_ID, "FormatException is not thrown as expected" + GetDataString(strSrc, provider));
retVal = false;
}
catch (FormatException)
{ }
catch (Exception e)
{
TestLibrary.TestFramework.LogError("010" + "TestId-" + c_TEST_ID, "Unexpected exception:" + e + GetDataString(strSrc, provider));
retVal = false;
}
return retVal;
}
public bool NegTest2()
{
bool retVal = true;
const string c_TEST_DESC = "NegTest2: The value of String object is a number greater than MaxValue";
const string c_TEST_ID = "N002";
string strSrc;
IFormatProvider provider;
UInt64 i;
i = GetUInt64();
strSrc = UInt64.MaxValue.ToString() + i.ToString();
provider = null;
TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
try
{
((IConvertible)strSrc).ToUInt64(provider);
TestLibrary.TestFramework.LogError("011" + "TestId-" + c_TEST_ID, "OverflowException is not thrown as expected" + GetDataString(strSrc, provider));
retVal = false;
}
catch (OverflowException)
{
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("012" + "TestId-" + c_TEST_ID, "Unexpected exception:" + e + GetDataString(strSrc, provider));
retVal = false;
}
return retVal;
}
public bool NegTest3()
{
bool retVal = true;
const string c_TEST_DESC = "NegTest3: The value of String object is a number less than MinValue";
const string c_TEST_ID = "N003";
string strSrc;
NumberFormatInfo ni = new NumberFormatInfo();
IFormatProvider provider;
UInt64 i;
i = GetUInt64();
strSrc = ni.NegativeSign + UInt64.MaxValue + i.ToString();
provider = (IFormatProvider)ni;
TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
try
{
((IConvertible)strSrc).ToUInt64(provider);
TestLibrary.TestFramework.LogError("013" + "TestId-" + c_TEST_ID, "OverflowException is not thrown as expected" + GetDataString(strSrc, provider));
retVal = false;
}
catch (OverflowException)
{ }
catch (Exception e)
{
TestLibrary.TestFramework.LogError("014" + "TestId-" + c_TEST_ID, "Unexpected exception:" + e + GetDataString(strSrc, provider));
retVal = false;
}
return retVal;
}
#endregion
// returns a random UInt64 between 0 and UInt64.MaxValue
public static UInt64 GetUInt64() // maybe bug
{
byte[] buffer = new byte[8];
UInt64 iVal;
TestLibrary.Generator.GetBytes(-55, buffer);
// convert to UInt64
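// Little-endian assembly: buffer[i] contributes bits 8*i through 8*i+7 of the result.
// For example, buffer = { 0x01, 0x02, 0, 0, 0, 0, 0, 0 } yields iVal = 0x0201 = 513.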
iVal = 0;
for (int i = 0; i < buffer.Length; i++)
{
iVal |= ((UInt64)buffer[i] << (i * 8));
}
TestLibrary.TestFramework.LogInformation("Random UInt64 produced: " + iVal.ToString());
return iVal;
}
private string GetDataString(string strSrc, IFormatProvider provider)
{
string str1, str2, str;
int len1;
if (null == strSrc)
{
str1 = "null";
len1 = 0;
}
else
{
str1 = strSrc;
len1 = strSrc.Length;
}
str2 = (null == provider) ? "null" : provider.ToString();
str = string.Format("\n[Source string value]\n \"{0}\"", str1);
str += string.Format("\n[Length of source string]\n {0}", len1);
str += string.Format("\n[Format provider string]\n {0}", str2);
return str;
}
}
| |
#region License
/*
* Copyright 2004 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion
#region Imports
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.Serialization;
using System.Security.Policy;
using NUnit.Framework;
using Spring.Objects;
#endregion
namespace Spring.Util
{
/// <summary>
/// Unit tests for the ObjectUtils class.
/// </summary>
/// <author>Rick Evans (.NET)</author>
[TestFixture]
public class ObjectUtilsTests
{
[Test]
public void NullSafeEqualsWithBothNull()
{
string first = null;
string second = null;
Assert.IsTrue(ObjectUtils.NullSafeEquals(first, second));
}
[Test]
public void NullSafeEqualsWithFirstNull()
{
string first = null;
string second = "";
Assert.IsFalse(ObjectUtils.NullSafeEquals(first, second));
}
[Test]
public void NullSafeEqualsWithSecondNull()
{
string first = "";
string second = null;
Assert.IsFalse(ObjectUtils.NullSafeEquals(first, second));
}
[Test]
public void NullSafeEqualsBothEquals()
{
string first = "this is it";
string second = "this is it";
Assert.IsTrue(ObjectUtils.NullSafeEquals(first, second));
}
[Test]
public void NullSafeEqualsNotEqual()
{
string first = "this is it";
int second = 12;
Assert.IsFalse(ObjectUtils.NullSafeEquals(first, second));
}
[Test]
public void IsAssignableAndNotTransparentProxyWithProxy()
{
AppDomain domain = null;
try
{
AppDomainSetup setup = new AppDomainSetup();
setup.ApplicationBase = Environment.CurrentDirectory;
domain = AppDomain.CreateDomain("Spring", new Evidence(AppDomain.CurrentDomain.Evidence), setup);
object foo = domain.CreateInstanceAndUnwrap(GetType().Assembly.FullName, typeof(Foo).FullName);
// the instance is definitely assignable to the supplied interface type...
bool isAssignable = ObjectUtils.IsAssignableAndNotTransparentProxy(typeof (IFoo), foo);
Assert.IsFalse(isAssignable, "Proxied instance was not recognized as such.");
}
finally
{
AppDomain.Unload(domain);
}
}
[Test]
public void InstantiateTypeWithNullType()
{
Assert.Throws<ArgumentNullException>(() => ObjectUtils.InstantiateType(null));
}
[Test]
public void InstantiateType()
{
object foo = ObjectUtils.InstantiateType(typeof (TestObject));
Assert.IsNotNull(foo, "Failed to instantiate an instance of a valid Type.");
Assert.IsTrue(foo is TestObject, "The instantiated instance was not an instance of the type that was passed in.");
}
[Test]
public void InstantiateTypeThrowingWithinPublicConstructor()
{
try
{
ObjectUtils.InstantiateType(typeof(ThrowingWithinConstructor));
Assert.Fail();
}
catch(FatalReflectionException ex)
{
// no nasty "TargetInvocationException" is in between!
Assert.AreEqual( typeof(ThrowingWithinConstructorException), ex.InnerException.GetType() );
}
}
[Test]
public void InstantiateTypeWithOpenGenericType()
{
Assert.Throws<FatalReflectionException>(() => ObjectUtils.InstantiateType(typeof(Dictionary<,>)));
}
[Test]
public void InstantiateGenericTypeWithArguments()
{
// ObjectUtils.InstantiateType(typeof(Dictionary<string, int>), new object[] { new object() } );
}
[Test]
public void InstantiateTypeWithAbstractType()
{
Assert.Throws<FatalReflectionException>(() => ObjectUtils.InstantiateType(typeof (AbstractType)));
}
[Test]
public void InstantiateTypeWithInterfaceType()
{
Assert.Throws<FatalReflectionException>(() => ObjectUtils.InstantiateType(typeof (IList)));
}
[Test]
public void InstantiateTypeWithTypeExposingNoZeroArgCtor()
{
Assert.Throws<FatalReflectionException>(() => ObjectUtils.InstantiateType(typeof(NoZeroArgConstructorType)));
}
[Test]
public void InstantiateTypeWithPrivateCtor()
{
ConstructorInfo ctor = typeof (OnlyPrivateCtor).GetConstructor(
BindingFlags.Instance | BindingFlags.NonPublic, null, new Type[] {typeof (string)},
null);
object foo = ObjectUtils.InstantiateType(ctor, new object[] {"Chungking Express"});
Assert.IsNotNull(foo, "Failed to instantiate an instance of a valid Type.");
Assert.IsTrue(foo is OnlyPrivateCtor, "The instantiated instance was not an instance of the type that was passed in.");
Assert.AreEqual("Chungking Express", ((OnlyPrivateCtor) foo).Name);
}
[Test]
public void InstantiateTypeWithNullCtor()
{
Assert.Throws<ArgumentNullException>(() => ObjectUtils.InstantiateType(typeof(IList).GetConstructor(Type.EmptyTypes), new object[] { }));
}
[Test]
public void InstantiateTypeWithCtorWithNoArgs()
{
Type type = typeof (TestObject);
ConstructorInfo ctor = type.GetConstructor(Type.EmptyTypes);
object foo = ObjectUtils.InstantiateType(ctor, ObjectUtils.EmptyObjects);
Assert.IsNotNull(foo, "Failed to instantiate an instance of a valid Type.");
Assert.IsTrue(foo is TestObject, "The instantiated instance was not an instance of the Type that was passed in.");
}
[Test]
public void InstantiateTypeWithCtorArgs()
{
Type type = typeof (TestObject);
ConstructorInfo ctor = type.GetConstructor(new Type[] {typeof (string), typeof (int)});
object foo = ObjectUtils.InstantiateType(ctor, new object[] {"Yakov Petrovich Golyadkin", 39});
Assert.IsNotNull(foo, "Failed to instantiate an instance of a valid Type.");
Assert.IsTrue(foo is TestObject, "The instantiated instance was not an instance of the Type that was passed in.");
TestObject obj = foo as TestObject;
Assert.AreEqual("Yakov Petrovich Golyadkin", obj.Name);
Assert.AreEqual(39, obj.Age);
}
[Test]
public void InstantiateTypeWithBadCtorArgs()
{
Type type = typeof (TestObject);
ConstructorInfo ctor = type.GetConstructor(new Type[] {typeof(string), typeof(int)});
try
{
ObjectUtils.InstantiateType(ctor, new object[] { 39, "Yakov Petrovich Golyadkin" });
Assert.Fail("Should throw an error");
}
catch
{
// ok...
}
}
[Test]
public void IsSimpleProperty()
{
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (string)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (long)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (bool)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (int)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (float)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (ushort)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (double)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (ulong)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (char)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (uint)));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (string[])));
Assert.IsTrue(ObjectUtils.IsSimpleProperty(typeof (Type)));
Assert.IsFalse(ObjectUtils.IsSimpleProperty(typeof (TestObject)));
Assert.IsFalse(ObjectUtils.IsSimpleProperty(typeof (IList[])));
}
[Test]
public void EnumerateFirstElement()
{
string expected = "Hiya";
IList list = new string[] {expected, "Aw!", "Man!"};
IEnumerator enumerator = list.GetEnumerator();
object actual = ObjectUtils.EnumerateFirstElement(enumerator);
Assert.AreEqual(expected, actual);
}
[Test]
public void EnumerateElementAtIndex()
{
string expected = "Mmm...";
IList list = new string[] {"Aw!", "Man!", expected};
IEnumerator enumerator = list.GetEnumerator();
object actual = ObjectUtils.EnumerateElementAtIndex(enumerator, 2);
Assert.AreEqual(expected, actual);
}
[Test]
public void EnumerateElementAtIndexViaIEnumerable()
{
string expected = "Mmm...";
IList list = new string[] {"Aw!", "Man!", expected};
object actual = ObjectUtils.EnumerateElementAtIndex(list, 2);
Assert.AreEqual(expected, actual);
}
[Test]
public void EnumerateElementAtOutOfRangeIndex()
{
string expected = "Mmm...";
IList list = new string[] {"Aw!", "Man!", expected};
IEnumerator enumerator = list.GetEnumerator();
Assert.Throws<ArgumentOutOfRangeException>(() => ObjectUtils.EnumerateElementAtIndex(enumerator, 12));
}
[Test]
public void EnumerateElementAtOutOfRangeIndexViaIEnumerable()
{
string expected = "Mmm...";
IList list = new string[] {"Aw!", "Man!", expected};
Assert.Throws<ArgumentOutOfRangeException>(() => ObjectUtils.EnumerateElementAtIndex(list, 12));
}
[Test]
public void EnumerateElementAtNegativeIndex()
{
string expected = "Mmm...";
IList list = new string[] {"Aw!", "Man!", expected};
IEnumerator enumerator = list.GetEnumerator();
Assert.Throws<ArgumentOutOfRangeException>(() => ObjectUtils.EnumerateElementAtIndex(enumerator, -10));
}
[Test]
public void EnumerateElementAtNegativeIndexViaIEnumerable()
{
string expected = "Mmm...";
IList list = new string[] {"Aw!", "Man!", expected};
Assert.Throws<ArgumentOutOfRangeException>(() => ObjectUtils.EnumerateElementAtIndex(list, -10));
}
#region Helper Classes
private interface IFoo
{
}
private sealed class Foo : MarshalByRefObject, IFoo
{
}
/// <summary>
/// A class that doesn't have a parameterless constructor.
/// </summary>
private class NoZeroArgConstructorType
{
/// <summary>
/// Creates a new instance of the NoZeroArgConstructorType class.
/// </summary>
/// <param name="foo">A spurious argument (ignored).</param>
public NoZeroArgConstructorType(string foo)
{
}
}
/// <summary>
/// An abstract class. Doh!
/// </summary>
private abstract class AbstractType
{
/// <summary>
/// Creates a new instance of the AbstractType class.
/// </summary>
public AbstractType()
{
}
}
private class OnlyPrivateCtor
{
private OnlyPrivateCtor(string name)
{
_name = name;
}
public string Name
{
get { return _name; }
set { _name = value; }
}
private string _name;
}
[Serializable]
private class ThrowingWithinConstructorException : TestException
{
public ThrowingWithinConstructorException()
{}
public ThrowingWithinConstructorException(string message) : base(message)
{}
public ThrowingWithinConstructorException(string message, Exception inner) : base(message, inner)
{}
protected ThrowingWithinConstructorException(SerializationInfo info, StreamingContext context) : base(info, context)
{}
}
public class ThrowingWithinConstructor
{
public ThrowingWithinConstructor()
{
throw new ThrowingWithinConstructorException();
}
}
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System.Text;
namespace Microsoft.Azure.Batch
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest.Azure;
using Models = Microsoft.Azure.Batch.Protocol.Models;
/// <summary>
/// Summarizes the state of a compute node.
/// </summary>
public partial class ComputeNode : IRefreshable
{
#region ComputeNode
/// <summary>
/// Instantiates an unbound ComputeNodeUser object to be populated by the caller and used to create a user account on the compute node in the Azure Batch service.
/// </summary>
/// <returns>A <see cref="ComputeNodeUser"/> object.</returns>
public ComputeNodeUser CreateComputeNodeUser()
{
ComputeNodeUser newUser = new ComputeNodeUser(this.parentBatchClient, this.CustomBehaviors, this.parentPoolId, this.Id);
return newUser;
}
/// <summary>
/// Begins an asynchronous call to delete the specified ComputeNodeUser.
/// </summary>
/// <param name="userName">The name of the ComputeNodeUser to be deleted.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
public Task DeleteComputeNodeUserAsync(
string userName,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
// create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
Task asyncTask = this.parentBatchClient.ProtocolLayer.DeleteComputeNodeUser(this.parentPoolId, this.Id, userName, bhMgr, cancellationToken);
return asyncTask;
}
/// <summary>
/// Blocking call to delete the specified ComputeNodeUser.
/// </summary>
/// <param name="userName">The name of the ComputeNodeUser to be deleted.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
public void DeleteComputeNodeUser(string userName, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = DeleteComputeNodeUserAsync(userName, additionalBehaviors);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Begins an asynchronous call to get RDP file data targeting the compute node of the current instance and write it to a specified Stream.
/// </summary>
/// <param name="rdpStream">The Stream into which the RDP file data will be written. This stream will not be closed or rewound by this call.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
public Task GetRDPFileAsync(Stream rdpStream, IEnumerable<BatchClientBehavior> additionalBehaviors = null, CancellationToken cancellationToken = default(CancellationToken))
{
// create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
Task asyncTask = this.parentBatchClient.ProtocolLayer.GetComputeNodeRDPFile(this.parentPoolId, this.Id, rdpStream, bhMgr, cancellationToken);
return asyncTask;
}
/// <summary>
/// Blocking call to get RDP file data targeting the compute node of the current instance and write it to a specified Stream.
/// </summary>
/// <param name="rdpStream">The Stream into which the RDP file data will be written. This stream will not be closed or rewound by this call.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
public void GetRDPFile(Stream rdpStream, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = GetRDPFileAsync(rdpStream, additionalBehaviors);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Begins an asynchronous call to get RDP file data targeting the compute node of the current instance and write it to a file with the specified name.
/// </summary>
/// <param name="rdpFileNameToCreate">The name of the RDP file to be created.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
public Task GetRDPFileAsync(
string rdpFileNameToCreate,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
            // create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
Task asyncTask = this.parentBatchClient.PoolOperations.GetRDPFileViaFileNameAsyncImpl(this.parentPoolId, this.Id, rdpFileNameToCreate, bhMgr, cancellationToken);
return asyncTask;
}
/// <summary>
/// Blocking call to get RDP file data targeting the compute node of the current instance and write them to a file with the specified name.
/// </summary>
/// <param name="rdpFileNameToCreate">The name of the RDP file to be created.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
public void GetRDPFile(string rdpFileNameToCreate, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = GetRDPFileAsync(rdpFileNameToCreate, additionalBehaviors);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Gets the settings required for remote login to a compute node.
/// </summary>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> that represents the asynchronous operation.</returns>
/// <remarks>
/// <para>The get remote login settings operation runs asynchronously.</para>
/// <para>This method can be invoked only if the pool is created with a <see cref="VirtualMachineConfiguration"/> property.
        /// If this method is invoked on pools created with <see cref="CloudServiceConfiguration" />, then the Batch service returns 409 (Conflict).
/// For pools with a <see cref="CloudServiceConfiguration" /> property, one of the GetRDPFileAsync/GetRDPFile methods must be used.</para>
/// </remarks>
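        /// <example>
        /// A minimal usage sketch (the pool and node identifiers are hypothetical, and the client lookup is an assumption rather than part of this file):
        /// <code>
        /// ComputeNode node = batchClient.PoolOperations.GetComputeNode("myPool", "tvm-1695681911_1-20240101t000000z");
        /// RemoteLoginSettings settings = await node.GetRemoteLoginSettingsAsync();
        /// Console.WriteLine("Remote login endpoint: {0}:{1}", settings.IPAddress, settings.Port);
        /// </code>
        /// </example>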
public System.Threading.Tasks.Task<RemoteLoginSettings> GetRemoteLoginSettingsAsync(IEnumerable<BatchClientBehavior> additionalBehaviors = null, CancellationToken cancellationToken = default(CancellationToken))
{
// create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
System.Threading.Tasks.Task<RemoteLoginSettings> asyncTask = this.parentBatchClient.PoolOperations.GetRemoteLoginSettingsImpl(
this.parentPoolId,
this.Id,
bhMgr,
cancellationToken);
return asyncTask;
}
/// <summary>
/// Gets the settings required for remote login to a compute node.
/// </summary>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
/// <remarks>
/// <para>This is a blocking operation. For a non-blocking equivalent, see <see cref="Microsoft.Azure.Batch.ComputeNode.GetRemoteLoginSettingsAsync"/>.</para>
/// <para>This method can be invoked only if the pool is created with a <see cref="Microsoft.Azure.Batch.VirtualMachineConfiguration"/> property.
        /// If this method is invoked on pools created with <see cref="Microsoft.Azure.Batch.CloudServiceConfiguration" />, then the Batch service returns 409 (Conflict).
/// For pools with a <see cref="Microsoft.Azure.Batch.CloudServiceConfiguration" /> property, one of the GetRDPFileAsync/GetRDPFile methods must be used.</para>
/// </remarks>
public RemoteLoginSettings GetRemoteLoginSettings(IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task<RemoteLoginSettings> asyncTask = GetRemoteLoginSettingsAsync(additionalBehaviors);
RemoteLoginSettings rls = asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
return rls;
}
/// <summary>
/// Begins an asynchronous call to remove the compute node from the pool.
/// </summary>
/// <param name="deallocationOption">
/// Specifies how to handle tasks already running, and when the nodes running them may be removed from the pool. The default is <see cref="Common.ComputeNodeDeallocationOption.Requeue"/>.
/// </param>
/// <param name="resizeTimeout">The maximum amount of time which the RemoveFromPool operation can take before being terminated by the Azure Batch system.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
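        /// <example>
        /// A minimal usage sketch (the deallocation option shown is only one of the possible values):
        /// <code>
        /// await node.RemoveFromPoolAsync(Common.ComputeNodeDeallocationOption.TaskCompletion);
        /// </code>
        /// </example>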
public Task RemoveFromPoolAsync(
Common.ComputeNodeDeallocationOption? deallocationOption = null,
TimeSpan? resizeTimeout = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
            // create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
List<string> computeNodeIds = new List<string> {this.Id};
Task asyncTask = this.parentBatchClient.PoolOperations.RemoveFromPoolAsyncImpl(this.parentPoolId, computeNodeIds, deallocationOption, resizeTimeout, bhMgr, cancellationToken);
return asyncTask;
}
/// <summary>
/// Blocking call to remove the compute node from the pool.
/// </summary>
/// <param name="deallocationOption">
/// Specifies how to handle tasks already running, and when the nodes running them may be removed from the pool. The default is <see cref="Common.ComputeNodeDeallocationOption.Requeue"/>.
/// </param>
/// <param name="resizeTimeout">The maximum amount of time which the RemoveFromPool operation can take before being terminated by the Azure Batch system.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
public void RemoveFromPool(Common.ComputeNodeDeallocationOption? deallocationOption = null, TimeSpan? resizeTimeout = null, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = RemoveFromPoolAsync(deallocationOption, resizeTimeout, additionalBehaviors);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Begins an asynchronous call to reboot the compute node.
/// </summary>
/// <param name="rebootOption">The reboot option associated with the reboot.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
public Task RebootAsync(
Common.ComputeNodeRebootOption? rebootOption = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
// create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
Task asyncTask = this.parentBatchClient.ProtocolLayer.RebootComputeNode(this.parentPoolId, this.Id, rebootOption, bhMgr, cancellationToken);
return asyncTask;
}
/// <summary>
/// Blocking call to reboot the compute node.
/// </summary>
/// <param name="rebootOption">The reboot option associated with the reboot.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
public void Reboot(Common.ComputeNodeRebootOption? rebootOption = null, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = RebootAsync(rebootOption, additionalBehaviors);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Begins an asynchronous call to reimage the compute node.
/// </summary>
/// <param name="reimageOption">The reimage option associated with the reimage.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
public Task ReimageAsync(
Common.ComputeNodeReimageOption? reimageOption = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
// create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
Task asyncTask = this.parentBatchClient.ProtocolLayer.ReimageComputeNode(this.parentPoolId, this.Id, reimageOption, bhMgr, cancellationToken);
return asyncTask;
}
/// <summary>
/// Blocking call to reimage the compute node.
/// </summary>
/// <param name="reimageOption">The reimage option associated with the reimage.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
public void Reimage(Common.ComputeNodeReimageOption? reimageOption = null, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = ReimageAsync(reimageOption, additionalBehaviors);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Begins an asynchronous request to get the specified NodeFile.
/// </summary>
/// <param name="filePath">The path of the file to retrieve.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
public System.Threading.Tasks.Task<NodeFile> GetNodeFileAsync(
string filePath,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
// create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
Task<NodeFile> asyncTask = this.parentBatchClient.PoolOperations.GetNodeFileAsyncImpl(this.parentPoolId, this.Id, filePath, bhMgr, cancellationToken);
return asyncTask;
}
/// <summary>
/// Blocking call to get the specified NodeFile.
/// </summary>
/// <param name="filePath">The path of the file to retrieve.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <returns>A bound <see cref="NodeFile"/> object.</returns>
public NodeFile GetNodeFile(string filePath, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task<NodeFile> asyncTask = this.GetNodeFileAsync(filePath, additionalBehaviors);
return asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Copies the contents of a file from the node to the given <see cref="Stream"/>.
/// </summary>
/// <param name="filePath">The path of the file to retrieve.</param>
/// <param name="stream">The stream to copy the file contents to.</param>
/// <param name="byteRange">A byte range defining what section of the file to copy. If omitted, the entire file is downloaded.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
public Task CopyNodeFileContentToStreamAsync(
string filePath,
Stream stream,
GetFileRequestByteRange byteRange = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
// create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
Task asyncTask = this.parentBatchClient.PoolOperations.CopyNodeFileContentToStreamAsyncImpl(
this.parentPoolId,
this.Id,
filePath,
stream,
byteRange,
bhMgr,
cancellationToken);
return asyncTask;
}
/// <summary>
/// Copies the contents of a file from the node to the given <see cref="Stream"/>.
/// </summary>
/// <param name="filePath">The path of the file to retrieve.</param>
/// <param name="stream">The stream to copy the file contents to.</param>
/// <param name="byteRange">A byte range defining what section of the file to copy. If omitted, the entire file is downloaded.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
public void CopyNodeFileContentToStream(
string filePath,
Stream stream,
GetFileRequestByteRange byteRange = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = this.CopyNodeFileContentToStreamAsync(filePath, stream, byteRange, additionalBehaviors);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Reads the contents of a file from the specified node into a string.
/// </summary>
/// <param name="filePath">The path of the file to retrieve.</param>
/// <param name="encoding">The encoding to use. If no value or null is specified, UTF8 is used.</param>
/// <param name="byteRange">A byte range defining what section of the file to copy. If omitted, the entire file is downloaded.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> object that represents the asynchronous operation.</returns>
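        /// <example>
        /// A minimal usage sketch (the file path is hypothetical and not taken from this file):
        /// <code>
        /// string stdout = await node.CopyNodeFileContentToStringAsync("startup/stdout.txt");
        /// Console.WriteLine(stdout);
        /// </code>
        /// </example>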
public Task<string> CopyNodeFileContentToStringAsync(
string filePath,
Encoding encoding = null,
GetFileRequestByteRange byteRange = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
// create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
return this.parentBatchClient.PoolOperations.CopyNodeFileContentToStringAsyncImpl(
this.parentPoolId,
this.Id,
filePath,
encoding,
byteRange,
bhMgr,
cancellationToken);
}
/// <summary>
/// Reads the contents of a file from the specified node into a string.
/// </summary>
/// <param name="filePath">The path of the file to retrieve.</param>
/// <param name="encoding">The encoding to use. If no value or null is specified, UTF8 is used.</param>
/// <param name="byteRange">A byte range defining what section of the file to copy. If omitted, the entire file is downloaded.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object.</param>
        /// <returns>The contents of the file, as a string.</returns>
public string CopyNodeFileContentToString(
string filePath,
Encoding encoding = null,
GetFileRequestByteRange byteRange = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task<string> asyncTask = this.CopyNodeFileContentToStringAsync(filePath, encoding, byteRange, additionalBehaviors);
return asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Exposes synchronous and asynchronous enumeration of the files for the node.
/// </summary>
/// <param name="recursive">If true, performs a recursive list of all files of the node. If false, returns only the files at the node directory root.</param>
/// <param name="detailLevel">Controls the detail level of the data returned by a call to the Azure Batch Service.</param>
/// <param name="additionalBehaviors">A collection of BatchClientBehavior instances that are applied after the CustomBehaviors on the current object and after the behavior implementing the DetailLevel.</param>
/// <returns>An instance of IPagedEnumerable that can be used to enumerate objects using either synchronous or asynchronous patterns.</returns>
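        /// <example>
        /// A minimal enumeration sketch (assumes <see cref="NodeFile"/> exposes a Path property; adjust to the property name of your SDK version):
        /// <code>
        /// foreach (NodeFile file in node.ListNodeFiles(recursive: true))
        /// {
        ///     Console.WriteLine(file.Path);
        /// }
        /// </code>
        /// </example>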
public IPagedEnumerable<NodeFile> ListNodeFiles(bool? recursive = null, DetailLevel detailLevel = null, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
// craft the behavior manager for this call
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
IPagedEnumerable<NodeFile> enumerator = this.parentBatchClient.PoolOperations.ListNodeFilesImpl(this.parentPoolId, this.Id, recursive, bhMgr, detailLevel);
return enumerator;
}
/// <summary>
/// Enables task scheduling on the compute node.
/// </summary>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> that represents the asynchronous operation.</returns>
/// <remarks>This operation runs asynchronously.</remarks>
public System.Threading.Tasks.Task EnableSchedulingAsync(
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
System.Threading.Tasks.Task asyncTask = this.parentBatchClient.PoolOperations.EnableComputeNodeSchedulingAsync(this.parentPoolId, this.Id, additionalBehaviors, cancellationToken);
return asyncTask;
}
/// <summary>
/// Enables task scheduling on the compute node.
/// </summary>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
        /// <remarks>This is a blocking operation. For a non-blocking equivalent, see <see cref="EnableSchedulingAsync"/>.</remarks>
public void EnableScheduling(IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = EnableSchedulingAsync(additionalBehaviors, CancellationToken.None);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Disables task scheduling on the compute node.
/// </summary>
/// <param name="disableComputeNodeSchedulingOption">Specifies what to do with currently running tasks. The default is <see cref="Common.DisableComputeNodeSchedulingOption.Requeue"/>.</param>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> that represents the asynchronous operation.</returns>
/// <remarks>This operation runs asynchronously.</remarks>
public System.Threading.Tasks.Task DisableSchedulingAsync(
Common.DisableComputeNodeSchedulingOption? disableComputeNodeSchedulingOption,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
System.Threading.Tasks.Task asyncTask = this.parentBatchClient.PoolOperations.DisableComputeNodeSchedulingAsync(this.parentPoolId, this.Id, disableComputeNodeSchedulingOption, additionalBehaviors, cancellationToken);
return asyncTask;
}
/// <summary>
/// Disables task scheduling on the compute node.
/// </summary>
/// <param name="disableComputeNodeSchedulingOption">Specifies what to do with currently running tasks. The default is <see cref="Common.DisableComputeNodeSchedulingOption.Requeue"/>.</param>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
/// <remarks>This is a blocking operation. For a non-blocking equivalent, see <see cref="DisableSchedulingAsync"/>.</remarks>
public void DisableScheduling(
Common.DisableComputeNodeSchedulingOption? disableComputeNodeSchedulingOption,
IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = DisableSchedulingAsync(disableComputeNodeSchedulingOption, additionalBehaviors, CancellationToken.None);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
/// <summary>
/// Upload Azure Batch service log files from the compute node.
/// </summary>
/// <param name="containerUrl">
/// The URL of the container within Azure Blob Storage to which to upload the Batch Service log file(s). The URL must include a Shared Access Signature (SAS) granting write permissions to the container.
/// </param>
/// <param name="startTime">
/// The start of the time range from which to upload Batch Service log file(s). Any log file containing a log message in the time range will be uploaded.
/// This means that the operation might retrieve more logs than have been requested since the entire log file is always uploaded.
/// </param>
/// <param name="endTime">
/// The end of the time range from which to upload Batch Service log file(s). Any log file containing a log message in the time range will be uploaded.
/// This means that the operation might retrieve more logs than have been requested since the entire log file is always uploaded. If this is omitted, the default is the current time.
/// </param>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="System.Threading.Tasks.Task"/> that represents the asynchronous operation.</returns>
/// <remarks>
/// This is for gathering Azure Batch service log files in an automated fashion from nodes if you are experiencing an error and wish to escalate to Azure support.
/// The Azure Batch service log files should be shared with Azure support to aid in debugging issues with the Batch service.
/// </remarks>
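        /// <example>
        /// A minimal usage sketch (the container SAS URL is a placeholder and the result property names are assumptions):
        /// <code>
        /// UploadBatchServiceLogsResult result = await node.UploadComputeNodeBatchServiceLogsAsync(
        ///     "https://mystorage.blob.core.windows.net/batchlogs?sv=...&amp;sig=...",
        ///     DateTime.UtcNow.AddHours(-6));
        /// Console.WriteLine("Uploaded {0} file(s) to virtual directory {1}", result.NumberOfFilesUploaded, result.VirtualDirectoryName);
        /// </code>
        /// </example>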
public System.Threading.Tasks.Task<UploadBatchServiceLogsResult> UploadComputeNodeBatchServiceLogsAsync(
string containerUrl,
DateTime startTime,
DateTime? endTime = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null,
CancellationToken cancellationToken = default(CancellationToken))
{
// craft the behavior manager for this call
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors);
return this.parentBatchClient.PoolOperations.UploadComputeNodeBatchServiceLogsAsyncImpl(
this.parentPoolId,
this.Id,
containerUrl,
startTime,
endTime,
bhMgr,
cancellationToken);
}
/// <summary>
/// Upload Azure Batch service log files from the specified compute node.
/// </summary>
/// <param name="containerUrl">
/// The URL of the container within Azure Blob Storage to which to upload the Batch Service log file(s). The URL must include a Shared Access Signature (SAS) granting write permissions to the container.
/// </param>
/// <param name="startTime">
/// The start of the time range from which to upload Batch Service log file(s). Any log file containing a log message in the time range will be uploaded.
/// This means that the operation might retrieve more logs than have been requested since the entire log file is always uploaded.
/// </param>
/// <param name="endTime">
/// The end of the time range from which to upload Batch Service log file(s). Any log file containing a log message in the time range will be uploaded.
/// This means that the operation might retrieve more logs than have been requested since the entire log file is always uploaded. If this is omitted, the default is the current time.
/// </param>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
/// <remarks>
/// This is for gathering Azure Batch service log files in an automated fashion from nodes if you are experiencing an error and wish to escalate to Azure support.
/// The Azure Batch service log files should be shared with Azure support to aid in debugging issues with the Batch service.
/// </remarks>
/// <returns>The result of uploading the batch service logs.</returns>
public UploadBatchServiceLogsResult UploadComputeNodeBatchServiceLogs(
string containerUrl,
DateTime startTime,
DateTime? endTime = null,
IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
var asyncTask = this.UploadComputeNodeBatchServiceLogsAsync(
containerUrl,
startTime,
endTime,
additionalBehaviors);
return asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
#endregion ComputeNode
#region IRefreshable
/// <summary>
/// Refreshes the current <see cref="ComputeNode"/>.
/// </summary>
/// <param name="detailLevel">The detail level for the refresh. If a detail level which omits the <see cref="Id"/> property is specified, refresh will fail.</param>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> for controlling the lifetime of the asynchronous operation.</param>
/// <returns>A <see cref="Task"/> representing the asynchronous refresh operation.</returns>
public async Task RefreshAsync(DetailLevel detailLevel = null, IEnumerable<BatchClientBehavior> additionalBehaviors = null, CancellationToken cancellationToken = default(CancellationToken))
{
            // create the behavior manager
BehaviorManager bhMgr = new BehaviorManager(this.CustomBehaviors, additionalBehaviors, detailLevel);
System.Threading.Tasks.Task<AzureOperationResponse<Models.ComputeNode, Models.ComputeNodeGetHeaders>> asyncTask =
this.parentBatchClient.ProtocolLayer.GetComputeNode(this.parentPoolId, this.Id, bhMgr, cancellationToken);
AzureOperationResponse<Models.ComputeNode, Models.ComputeNodeGetHeaders> response = await asyncTask.ConfigureAwait(continueOnCapturedContext: false);
// get pool from response
Models.ComputeNode newProtocolComputeNode = response.Body;
this.propertyContainer = new PropertyContainer(newProtocolComputeNode);
}
/// <summary>
/// Refreshes the <see cref="ComputeNode"/>.
/// </summary>
/// <param name="detailLevel">The detail level for the refresh. If a detail level which omits the <see cref="Id"/> property is specified, refresh will fail.</param>
/// <param name="additionalBehaviors">A collection of <see cref="BatchClientBehavior"/> instances that are applied to the Batch service request after the <see cref="CustomBehaviors"/>.</param>
public void Refresh(DetailLevel detailLevel = null, IEnumerable<BatchClientBehavior> additionalBehaviors = null)
{
Task asyncTask = RefreshAsync(detailLevel, additionalBehaviors);
asyncTask.WaitAndUnaggregateException(this.CustomBehaviors, additionalBehaviors);
}
#endregion
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Reflection;
using System.Threading;
using System.Timers;
using log4net;
using OpenMetaverse;
using OpenMetaverse.Assets;
using OpenMetaverse.Packets;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Console;
using pCampBot.Interfaces;
using Timer = System.Timers.Timer;
using PermissionMask = OpenSim.Framework.PermissionMask;
namespace pCampBot
{
public enum ConnectionState
{
Disconnected,
Connecting,
Connected,
Disconnecting
}
public class Bot
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
public int PacketDebugLevel
{
get { return m_packetDebugLevel; }
set
{
if (value == m_packetDebugLevel)
return;
m_packetDebugLevel = value;
if (Client != null)
{
if (m_packetDebugLevel <= 0)
Client.Network.UnregisterCallback(PacketType.Default, PacketReceivedDebugHandler);
else
Client.Network.RegisterCallback(PacketType.Default, PacketReceivedDebugHandler, false);
}
}
}
private int m_packetDebugLevel;
public delegate void AnEvent(Bot callbot, EventType someevent); // event delegate for bot events
/// <summary>
/// Controls whether bots request textures for the object information they receive
/// </summary>
public bool RequestObjectTextures { get; set; }
/// <summary>
/// Bot manager.
/// </summary>
public BotManager Manager { get; private set; }
/// <summary>
/// Behaviours implemented by this bot.
/// </summary>
/// <remarks>
/// Indexed by abbreviated name. There can only be one instance of a particular behaviour.
/// Lock this structure before manipulating it.
/// </remarks>
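        /// <example>
        /// A minimal sketch of inspecting and removing a behaviour at runtime (the "p" abbreviation is a hypothetical behaviour name):
        /// <code>
        /// IBehaviour behaviour;
        /// if (bot.TryGetBehaviour("p", out behaviour))
        ///     bot.RemoveBehaviour("p");
        /// </code>
        /// </example>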
public Dictionary<string, IBehaviour> Behaviours { get; private set; }
/// <summary>
/// Objects that the bot has discovered.
/// </summary>
/// <remarks>
        /// Returns a copy of the dictionary. Inserting new objects manually will have no effect.
/// </remarks>
public Dictionary<UUID, Primitive> Objects
{
get
{
lock (m_objects)
return new Dictionary<UUID, Primitive>(m_objects);
}
}
private Dictionary<UUID, Primitive> m_objects = new Dictionary<UUID, Primitive>();
/// <summary>
/// Is this bot connected to the grid?
/// </summary>
public ConnectionState ConnectionState { get; private set; }
public List<Simulator> Simulators
{
get
{
lock (Client.Network.Simulators)
return new List<Simulator>(Client.Network.Simulators);
}
}
/// <summary>
/// The number of connections that this bot has to different simulators.
/// </summary>
/// <value>Includes both root and child connections.</value>
public int SimulatorsCount
{
get
{
lock (Client.Network.Simulators)
return Client.Network.Simulators.Count;
}
}
public string FirstName { get; private set; }
public string LastName { get; private set; }
public string Name { get; private set; }
public string Password { get; private set; }
public string LoginUri { get; private set; }
public string StartLocation { get; private set; }
public string saveDir;
public string wear;
public event AnEvent OnConnected;
public event AnEvent OnDisconnected;
/// <summary>
        /// Keeps track of the continuously acting thread so that we can abort it.
/// </summary>
private Thread m_actionThread;
protected List<uint> objectIDs = new List<uint>();
/// <summary>
/// Random number generator.
/// </summary>
public Random Random { get; private set; }
/// <summary>
/// New instance of a SecondLife client
/// </summary>
public GridClient Client { get; private set; }
/// <summary>
/// Constructor
/// </summary>
        /// <param name="bm">The bot manager controlling this bot.</param>
        /// <param name="behaviours">Behaviours for this bot to perform.</param>
        /// <param name="firstName">The avatar's first name.</param>
        /// <param name="lastName">The avatar's last name.</param>
        /// <param name="password">The avatar's password.</param>
        /// <param name="startLocation">The location at which the bot logs in.</param>
        /// <param name="loginUri">The login URI of the grid.</param>
public Bot(
BotManager bm, List<IBehaviour> behaviours,
string firstName, string lastName, string password, string startLocation, string loginUri)
{
ConnectionState = ConnectionState.Disconnected;
Random = new Random(bm.Rng.Next());
FirstName = firstName;
LastName = lastName;
Name = string.Format("{0} {1}", FirstName, LastName);
Password = password;
LoginUri = loginUri;
StartLocation = startLocation;
Manager = bm;
Behaviours = new Dictionary<string, IBehaviour>();
foreach (IBehaviour behaviour in behaviours)
AddBehaviour(behaviour);
            // Called here only so that a client exists to use as a template; a fresh client is created on each connect.
CreateLibOmvClient();
}
public bool TryGetBehaviour(string abbreviatedName, out IBehaviour behaviour)
{
lock (Behaviours)
return Behaviours.TryGetValue(abbreviatedName, out behaviour);
}
public bool AddBehaviour(IBehaviour behaviour)
{
Dictionary<string, IBehaviour> updatedBehaviours = new Dictionary<string, IBehaviour>(Behaviours);
if (!updatedBehaviours.ContainsKey(behaviour.AbbreviatedName))
{
behaviour.Initialize(this);
updatedBehaviours.Add(behaviour.AbbreviatedName, behaviour);
Behaviours = updatedBehaviours;
return true;
}
return false;
}
public bool RemoveBehaviour(string abbreviatedName)
{
if (Behaviours.Count <= 0)
return false;
Dictionary<string, IBehaviour> updatedBehaviours = new Dictionary<string, IBehaviour>(Behaviours);
IBehaviour behaviour;
if (!updatedBehaviours.TryGetValue(abbreviatedName, out behaviour))
return false;
updatedBehaviours.Remove(abbreviatedName);
Behaviours = updatedBehaviours;
behaviour.Close();
return true;
}
private void CreateLibOmvClient()
{
GridClient newClient = new GridClient();
if (Client != null)
{
// Remove any registered debug handlers
Client.Network.UnregisterCallback(PacketType.Default, PacketReceivedDebugHandler);
newClient.Settings.LOGIN_SERVER = Client.Settings.LOGIN_SERVER;
newClient.Settings.ALWAYS_DECODE_OBJECTS = Client.Settings.ALWAYS_DECODE_OBJECTS;
newClient.Settings.AVATAR_TRACKING = Client.Settings.AVATAR_TRACKING;
newClient.Settings.OBJECT_TRACKING = Client.Settings.OBJECT_TRACKING;
newClient.Settings.SEND_AGENT_THROTTLE = Client.Settings.SEND_AGENT_THROTTLE;
newClient.Settings.SEND_AGENT_UPDATES = Client.Settings.SEND_AGENT_UPDATES;
newClient.Settings.SEND_PINGS = Client.Settings.SEND_PINGS;
newClient.Settings.STORE_LAND_PATCHES = Client.Settings.STORE_LAND_PATCHES;
newClient.Settings.USE_ASSET_CACHE = Client.Settings.USE_ASSET_CACHE;
newClient.Settings.MULTIPLE_SIMS = Client.Settings.MULTIPLE_SIMS;
newClient.Throttle.Asset = Client.Throttle.Asset;
newClient.Throttle.Land = Client.Throttle.Land;
newClient.Throttle.Task = Client.Throttle.Task;
newClient.Throttle.Texture = Client.Throttle.Texture;
newClient.Throttle.Wind = Client.Throttle.Wind;
newClient.Throttle.Total = Client.Throttle.Total;
}
else
{
newClient.Settings.LOGIN_SERVER = LoginUri;
newClient.Settings.ALWAYS_DECODE_OBJECTS = false;
newClient.Settings.AVATAR_TRACKING = false;
newClient.Settings.OBJECT_TRACKING = false;
newClient.Settings.SEND_AGENT_THROTTLE = true;
newClient.Settings.SEND_PINGS = true;
newClient.Settings.STORE_LAND_PATCHES = false;
newClient.Settings.USE_ASSET_CACHE = false;
newClient.Settings.MULTIPLE_SIMS = true;
newClient.Throttle.Asset = 100000;
newClient.Throttle.Land = 100000;
newClient.Throttle.Task = 100000;
newClient.Throttle.Texture = 100000;
newClient.Throttle.Wind = 100000;
newClient.Throttle.Total = 400000;
}
newClient.Network.LoginProgress += Network_LoginProgress;
newClient.Network.SimConnected += Network_SimConnected;
newClient.Network.SimDisconnected += Network_SimDisconnected;
newClient.Network.Disconnected += Network_OnDisconnected;
newClient.Objects.ObjectUpdate += Objects_NewPrim;
if (m_packetDebugLevel > 0)
newClient.Network.RegisterCallback(PacketType.Default, PacketReceivedDebugHandler);
Client = newClient;
}
        // We do our actions here. This is where one would
        // add additional steps and/or things the bot should do.
private void Action()
{
while (ConnectionState == ConnectionState.Connected)
{
foreach (IBehaviour behaviour in Behaviours.Values)
{
// Thread.Sleep(Random.Next(3000, 10000));
// m_log.DebugFormat("[pCAMPBOT]: For {0} performing action {1}", Name, b.GetType());
behaviour.Action();
}
}
foreach (IBehaviour b in Behaviours.Values)
b.Close();
}
/// <summary>
/// Tells LibSecondLife to logout and disconnect. Raises the disconnect events once it finishes.
/// </summary>
public void Disconnect()
{
ConnectionState = ConnectionState.Disconnecting;
foreach (IBehaviour behaviour in Behaviours.Values)
behaviour.Close();
Client.Network.Logout();
}
public void Connect()
{
Thread connectThread = new Thread(ConnectInternal);
connectThread.Name = Name;
connectThread.IsBackground = true;
connectThread.Start();
}
/// <summary>
/// This is the bot startup loop.
/// </summary>
private void ConnectInternal()
{
ConnectionState = ConnectionState.Connecting;
            // Currently we create a new client on each connect. libomv doesn't seem to process new sim
            // information (e.g. EstablishAgentCommunication events) if connecting after a disconnect with the same
            // client
CreateLibOmvClient();
if (Client.Network.Login(FirstName, LastName, Password, "pCampBot", StartLocation, "pCampBot"))
{
ConnectionState = ConnectionState.Connected;
Thread.Sleep(Random.Next(1000, 10000));
m_actionThread = new Thread(Action);
m_actionThread.Start();
// OnConnected(this, EventType.CONNECTED);
if (wear == "save")
{
SaveDefaultAppearance();
}
else if (wear != "no")
{
MakeDefaultAppearance(wear);
}
// Extract nearby region information.
Client.Grid.GridRegion += Manager.Grid_GridRegion;
uint xUint, yUint;
Utils.LongToUInts(Client.Network.CurrentSim.Handle, out xUint, out yUint);
ushort minX, minY, maxX, maxY;
                // Clamp at zero rather than allowing uint underflow.
                minX = (ushort)Math.Max(0, (int)xUint - 5);
                minY = (ushort)Math.Max(0, (int)yUint - 5);
maxX = (ushort)(xUint + 5);
maxY = (ushort)(yUint + 5);
Client.Grid.RequestMapBlocks(GridLayerType.Terrain, minX, minY, maxX, maxY, false);
}
else
{
ConnectionState = ConnectionState.Disconnected;
m_log.ErrorFormat(
"{0} {1} cannot login: {2}", FirstName, LastName, Client.Network.LoginMessage);
if (OnDisconnected != null)
{
OnDisconnected(this, EventType.DISCONNECTED);
}
}
}
/// <summary>
/// Sit this bot on the ground.
/// </summary>
public void SitOnGround()
{
if (ConnectionState == ConnectionState.Connected)
Client.Self.SitOnGround();
}
/// <summary>
/// Stand this bot
/// </summary>
public void Stand()
{
if (ConnectionState == ConnectionState.Connected)
{
// Unlike sit on ground, here libomv checks whether we have SEND_AGENT_UPDATES enabled.
bool prevUpdatesSetting = Client.Settings.SEND_AGENT_UPDATES;
Client.Settings.SEND_AGENT_UPDATES = true;
Client.Self.Stand();
Client.Settings.SEND_AGENT_UPDATES = prevUpdatesSetting;
}
}
public void SaveDefaultAppearance()
{
saveDir = "MyAppearance/" + FirstName + "_" + LastName;
if (!Directory.Exists(saveDir))
{
Directory.CreateDirectory(saveDir);
}
Array wtypes = Enum.GetValues(typeof(WearableType));
foreach (WearableType wtype in wtypes)
{
UUID wearable = Client.Appearance.GetWearableAsset(wtype);
if (wearable != UUID.Zero)
{
Client.Assets.RequestAsset(wearable, AssetType.Clothing, false, Asset_ReceivedCallback);
Client.Assets.RequestAsset(wearable, AssetType.Bodypart, false, Asset_ReceivedCallback);
}
}
}
public void SaveAsset(AssetWearable asset)
{
if (asset != null)
{
try
{
if (asset.Decode())
{
File.WriteAllBytes(Path.Combine(saveDir, String.Format("{1}.{0}",
asset.AssetType.ToString().ToLower(),
asset.WearableType)), asset.AssetData);
}
else
{
m_log.WarnFormat("Failed to decode {0} asset {1}", asset.AssetType, asset.AssetID);
}
}
catch (Exception e)
{
m_log.ErrorFormat("Exception: {0}{1}", e.Message, e.StackTrace);
}
}
}
public WearableType GetWearableType(string path)
{
string type = ((((path.Split('/'))[2]).Split('.'))[0]).Trim();
switch (type)
{
case "Eyes":
return WearableType.Eyes;
case "Hair":
return WearableType.Hair;
case "Pants":
return WearableType.Pants;
case "Shape":
return WearableType.Shape;
case "Shirt":
return WearableType.Shirt;
case "Skin":
return WearableType.Skin;
default:
return WearableType.Shape;
}
}
public void MakeDefaultAppearance(string wear)
{
try
{
if (wear == "yes")
{
//TODO: Implement random outfit picking
m_log.DebugFormat("Picks a random outfit. Not yet implemented.");
}
else if (wear != "save")
saveDir = "MyAppearance/" + wear;
saveDir = saveDir + "/";
string[] clothing = Directory.GetFiles(saveDir, "*.clothing", SearchOption.TopDirectoryOnly);
string[] bodyparts = Directory.GetFiles(saveDir, "*.bodypart", SearchOption.TopDirectoryOnly);
InventoryFolder clothfolder = FindClothingFolder();
UUID transid = UUID.Random();
List<InventoryBase> listwearables = new List<InventoryBase>();
for (int i = 0; i < clothing.Length; i++)
{
UUID assetID = UUID.Random();
AssetClothing asset = new AssetClothing(assetID, File.ReadAllBytes(clothing[i]));
asset.Decode();
asset.Owner = Client.Self.AgentID;
asset.WearableType = GetWearableType(clothing[i]);
asset.Encode();
transid = Client.Assets.RequestUpload(asset,true);
Client.Inventory.RequestCreateItem(clothfolder.UUID, "MyClothing" + i.ToString(), "MyClothing", AssetType.Clothing,
transid, InventoryType.Wearable, asset.WearableType, (OpenMetaverse.PermissionMask)PermissionMask.All, delegate(bool success, InventoryItem item)
{
if (success)
{
listwearables.Add(item);
}
else
{
m_log.WarnFormat("Failed to create item {0}", item.Name);
}
}
);
}
for (int i = 0; i < bodyparts.Length; i++)
{
UUID assetID = UUID.Random();
AssetBodypart asset = new AssetBodypart(assetID, File.ReadAllBytes(bodyparts[i]));
asset.Decode();
asset.Owner = Client.Self.AgentID;
asset.WearableType = GetWearableType(bodyparts[i]);
asset.Encode();
transid = Client.Assets.RequestUpload(asset,true);
Client.Inventory.RequestCreateItem(clothfolder.UUID, "MyBodyPart" + i.ToString(), "MyBodyPart", AssetType.Bodypart,
transid, InventoryType.Wearable, asset.WearableType, (OpenMetaverse.PermissionMask)PermissionMask.All, delegate(bool success, InventoryItem item)
{
if (success)
{
listwearables.Add(item);
}
else
{
m_log.WarnFormat("Failed to create item {0}", item.Name);
}
}
);
}
Thread.Sleep(1000);
if (listwearables == null || listwearables.Count == 0)
{
m_log.DebugFormat("Nothing to send on this folder!");
}
else
{
m_log.DebugFormat("Sending {0} wearables...", listwearables.Count);
Client.Appearance.WearOutfit(listwearables, false);
}
}
catch (Exception ex)
{
Console.WriteLine(ex.ToString());
}
}
public InventoryFolder FindClothingFolder()
{
UUID rootfolder = Client.Inventory.Store.RootFolder.UUID;
List<InventoryBase> listfolders = Client.Inventory.Store.GetContents(rootfolder);
InventoryFolder clothfolder = new InventoryFolder(UUID.Random());
foreach (InventoryBase folder in listfolders)
{
if (folder.Name == "Clothing")
{
clothfolder = (InventoryFolder)folder;
break;
}
}
return clothfolder;
}
public void Network_LoginProgress(object sender, LoginProgressEventArgs args)
{
            m_log.DebugFormat("[BOT]: Bot {0} {1} in Network_LoginProgress", Name, args.Status);
if (args.Status == LoginStatus.Success)
{
if (OnConnected != null)
{
OnConnected(this, EventType.CONNECTED);
}
}
}
public void Network_SimConnected(object sender, SimConnectedEventArgs args)
{
m_log.DebugFormat(
"[BOT]: Bot {0} connected to region {1} at {2}", Name, args.Simulator.Name, args.Simulator.IPEndPoint);
}
public void Network_SimDisconnected(object sender, SimDisconnectedEventArgs args)
{
m_log.DebugFormat(
"[BOT]: Bot {0} disconnected from region {1} at {2}", Name, args.Simulator.Name, args.Simulator.IPEndPoint);
}
public void Network_OnDisconnected(object sender, DisconnectedEventArgs args)
{
ConnectionState = ConnectionState.Disconnected;
m_log.DebugFormat(
"[BOT]: Bot {0} disconnected from grid, reason {1}, message {2}", Name, args.Reason, args.Message);
// m_log.ErrorFormat("Fired Network_OnDisconnected");
// if (
// (args.Reason == NetworkManager.DisconnectType.SimShutdown
// || args.Reason == NetworkManager.DisconnectType.NetworkTimeout)
// && OnDisconnected != null)
if (
(args.Reason == NetworkManager.DisconnectType.ClientInitiated
|| args.Reason == NetworkManager.DisconnectType.ServerInitiated
|| args.Reason == NetworkManager.DisconnectType.NetworkTimeout)
&& OnDisconnected != null)
// if (OnDisconnected != null)
{
OnDisconnected(this, EventType.DISCONNECTED);
}
}
public void Objects_NewPrim(object sender, PrimEventArgs args)
{
if (!RequestObjectTextures)
return;
Primitive prim = args.Prim;
if (prim != null)
{
lock (m_objects)
m_objects[prim.ID] = prim;
if (prim.Textures != null)
{
if (prim.Textures.DefaultTexture.TextureID != UUID.Zero)
{
GetTextureOrMesh(prim.Textures.DefaultTexture.TextureID, true);
}
for (int i = 0; i < prim.Textures.FaceTextures.Length; i++)
{
Primitive.TextureEntryFace face = prim.Textures.FaceTextures[i];
if (face != null)
{
UUID textureID = prim.Textures.FaceTextures[i].TextureID;
if (textureID != UUID.Zero)
GetTextureOrMesh(textureID, true);
}
}
}
if (prim.Sculpt != null && prim.Sculpt.SculptTexture != UUID.Zero)
{
bool mesh = (prim.Sculpt.Type == SculptType.Mesh);
GetTextureOrMesh(prim.Sculpt.SculptTexture, !mesh);
}
}
}
private void GetTextureOrMesh(UUID assetID, bool texture)
{
lock (Manager.AssetsReceived)
{
// Don't request assets more than once.
if (Manager.AssetsReceived.ContainsKey(assetID))
return;
Manager.AssetsReceived[assetID] = false;
}
try
{
if (texture)
Client.Assets.RequestImage(assetID, ImageType.Normal, Asset_TextureCallback_Texture);
else
Client.Assets.RequestMesh(assetID, Asset_MeshCallback);
}
catch (Exception e)
{
m_log.Warn(string.Format("Error requesting {0} {1}", texture ? "texture" : "mesh", assetID), e);
}
}
public void Asset_TextureCallback_Texture(TextureRequestState state, AssetTexture assetTexture)
{
if (state == TextureRequestState.Finished)
{
lock (Manager.AssetsReceived)
Manager.AssetsReceived[assetTexture.AssetID] = true;
}
}
private void Asset_MeshCallback(bool success, AssetMesh assetMesh)
{
lock (Manager.AssetsReceived)
Manager.AssetsReceived[assetMesh.AssetID] = success;
}
public void Asset_ReceivedCallback(AssetDownload transfer, Asset asset)
{
lock (Manager.AssetsReceived)
Manager.AssetsReceived[asset.AssetID] = true;
// if (wear == "save")
// {
// SaveAsset((AssetWearable) asset);
// }
}
private void PacketReceivedDebugHandler(object o, PacketReceivedEventArgs args)
{
Packet p = args.Packet;
Header h = p.Header;
Simulator s = args.Simulator;
m_log.DebugFormat(
"[BOT]: Bot {0} received from {1} packet {2} #{3}, rel {4}, res {5}",
Name, s.Name, p.Type, h.Sequence, h.Reliable, h.Resent);
}
}
}
| |
using System;
using System.IO;
using Org.BouncyCastle.Asn1;
using Org.BouncyCastle.Asn1.X509;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Crypto.Agreement;
using Org.BouncyCastle.Crypto.Agreement.Srp;
using Org.BouncyCastle.Crypto.Digests;
using Org.BouncyCastle.Crypto.IO;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Math;
using Org.BouncyCastle.Security;
using Org.BouncyCastle.Utilities;
namespace Org.BouncyCastle.Crypto.Tls
{
/// <summary>
/// TLS 1.1 SRP key exchange.
/// </summary>
internal class TlsSrpKeyExchange
: TlsKeyExchange
{
protected TlsClientContext context;
protected KeyExchangeAlgorithm keyExchange;
protected TlsSigner tlsSigner;
protected byte[] identity;
protected byte[] password;
protected AsymmetricKeyParameter serverPublicKey = null;
protected byte[] s = null;
protected BigInteger B = null;
protected Srp6Client srpClient = new Srp6Client();
internal TlsSrpKeyExchange(TlsClientContext context, KeyExchangeAlgorithm keyExchange,
byte[] identity, byte[] password)
{
switch (keyExchange)
{
case KeyExchangeAlgorithm.SRP:
this.tlsSigner = null;
break;
case KeyExchangeAlgorithm.SRP_RSA:
this.tlsSigner = new TlsRsaSigner();
break;
case KeyExchangeAlgorithm.SRP_DSS:
this.tlsSigner = new TlsDssSigner();
break;
default:
throw new ArgumentException("unsupported key exchange algorithm", "keyExchange");
}
this.context = context;
this.keyExchange = keyExchange;
this.identity = identity;
this.password = password;
}
public virtual void SkipServerCertificate()
{
if (tlsSigner != null)
{
throw new TlsFatalAlert(AlertDescription.unexpected_message);
}
}
public virtual void ProcessServerCertificate(Certificate serverCertificate)
{
if (tlsSigner == null)
{
throw new TlsFatalAlert(AlertDescription.unexpected_message);
}
X509CertificateStructure x509Cert = serverCertificate.certs[0];
SubjectPublicKeyInfo keyInfo = x509Cert.SubjectPublicKeyInfo;
try
{
this.serverPublicKey = PublicKeyFactory.CreateKey(keyInfo);
}
// catch (RuntimeException)
catch (Exception)
{
throw new TlsFatalAlert(AlertDescription.unsupported_certificate);
}
if (!tlsSigner.IsValidPublicKey(this.serverPublicKey))
{
throw new TlsFatalAlert(AlertDescription.certificate_unknown);
}
TlsUtilities.ValidateKeyUsage(x509Cert, KeyUsage.DigitalSignature);
// TODO
/*
* Perform various checks per RFC2246 7.4.2: "Unless otherwise specified, the
* signing algorithm for the certificate must be the same as the algorithm for the
* certificate key."
*/
}
public virtual void SkipServerKeyExchange()
{
throw new TlsFatalAlert(AlertDescription.unexpected_message);
}
public virtual void ProcessServerKeyExchange(Stream input)
{
SecurityParameters securityParameters = context.SecurityParameters;
Stream sigIn = input;
ISigner signer = null;
if (tlsSigner != null)
{
signer = InitSigner(tlsSigner, securityParameters);
sigIn = new SignerStream(input, signer, null);
}
byte[] NBytes = TlsUtilities.ReadOpaque16(sigIn);
byte[] gBytes = TlsUtilities.ReadOpaque16(sigIn);
byte[] sBytes = TlsUtilities.ReadOpaque8(sigIn);
byte[] BBytes = TlsUtilities.ReadOpaque16(sigIn);
if (signer != null)
{
byte[] sigByte = TlsUtilities.ReadOpaque16(input);
if (!signer.VerifySignature(sigByte))
{
throw new TlsFatalAlert(AlertDescription.bad_certificate);
}
}
BigInteger N = new BigInteger(1, NBytes);
BigInteger g = new BigInteger(1, gBytes);
// TODO Validate group parameters (see RFC 5054)
//throw new TlsFatalAlert(AlertDescription.insufficient_security);
this.s = sBytes;
/*
* RFC 5054 2.5.3: The client MUST abort the handshake with an "illegal_parameter"
* alert if B % N = 0.
*/
try
{
this.B = Srp6Utilities.ValidatePublicValue(N, new BigInteger(1, BBytes));
}
catch (CryptoException)
{
throw new TlsFatalAlert(AlertDescription.illegal_parameter);
}
this.srpClient.Init(N, g, new Sha1Digest(), context.SecureRandom);
}
public virtual void ValidateCertificateRequest(CertificateRequest certificateRequest)
{
throw new TlsFatalAlert(AlertDescription.unexpected_message);
}
public virtual void SkipClientCredentials()
{
// OK
}
public virtual void ProcessClientCredentials(TlsCredentials clientCredentials)
{
throw new TlsFatalAlert(AlertDescription.internal_error);
}
public virtual void GenerateClientKeyExchange(Stream output)
{
byte[] keData = BigIntegers.AsUnsignedByteArray(srpClient.GenerateClientCredentials(s,
this.identity, this.password));
TlsUtilities.WriteOpaque16(keData, output);
}
public virtual byte[] GeneratePremasterSecret()
{
try
{
// TODO Check if this needs to be a fixed size
return BigIntegers.AsUnsignedByteArray(srpClient.CalculateSecret(B));
}
catch (CryptoException)
{
throw new TlsFatalAlert(AlertDescription.illegal_parameter);
}
}
protected virtual ISigner InitSigner(TlsSigner tlsSigner, SecurityParameters securityParameters)
{
ISigner signer = tlsSigner.CreateVerifyer(this.serverPublicKey);
signer.BlockUpdate(securityParameters.clientRandom, 0, securityParameters.clientRandom.Length);
signer.BlockUpdate(securityParameters.serverRandom, 0, securityParameters.serverRandom.Length);
return signer;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Net;
using Newtonsoft.Json;
using SteamKit2;
namespace SteamBot
{
public class Trade
{
#region Static
// Static properties
public static string SteamCommunityDomain = "steamcommunity.com";
public static string SteamTradeUrl = "http://steamcommunity.com/trade/{0}/";
public static Schema CurrentSchema = null;
protected static void PrintConsole (String line, ConsoleColor color = ConsoleColor.White)
{
Console.ForegroundColor = color;
Console.WriteLine (line);
Console.ForegroundColor = ConsoleColor.White;
}
#endregion
#region Properties
public SteamID MeSID;
public SteamID OtherSID;
// Generic Trade info
public bool MeReady = false;
public bool OtherReady = false;
bool tradeStarted = false;
public DateTime TradeStart;
public DateTime LastAction;
public int MaximumTradeTime = 180;
public int MaximumActionGap = 30;
// Items
public List<ulong> MyOfferedItems = new List<ulong> ();
public List<ulong> OtherOfferedItems = new List<ulong> ();
public Inventory OtherInventory;
public Inventory MyInventory;
// Internal properties needed for Steam API.
protected string baseTradeURL;
protected string steamLogin;
protected string sessionId;
protected string apiKey;
protected int version = 1;
protected int logpos;
protected int numEvents;
protected dynamic OtherItems;
protected dynamic MyItems;
#endregion
#region Events
public delegate void ErrorHandler (string error);
public event ErrorHandler OnError;
public delegate void TimeoutHandler ();
public event TimeoutHandler OnTimeout;
public delegate void SuccessfulInit ();
public event SuccessfulInit OnAfterInit;
public delegate void UserAddItemHandler (Schema.Item schemaItem, Inventory.Item inventoryItem);
public event UserAddItemHandler OnUserAddItem;
public delegate void UserRemoveItemHandler (Schema.Item schemaItem, Inventory.Item inventoryItem);
        public event UserRemoveItemHandler OnUserRemoveItem;
public delegate void MessageHandler (string msg);
public event MessageHandler OnMessage;
public delegate void UserSetReadyStateHandler (bool ready);
public event UserSetReadyStateHandler OnUserSetReady;
public delegate void UserAcceptHandler ();
public event UserAcceptHandler OnUserAccept;
#endregion
public Trade (SteamID me, SteamID other, string sessionId, string token, string apiKey, TradeListener listener = null)
{
MeSID = me;
OtherSID = other;
this.sessionId = sessionId;
steamLogin = token;
this.apiKey = apiKey;
AddListener (listener);
baseTradeURL = String.Format (SteamTradeUrl, OtherSID.ConvertToUInt64 ());
// try to poll for the first time
try
{
Poll ();
}
catch (Exception)
{
PrintConsole ("Failed to connect to Steam!", ConsoleColor.Red);
if (OnError != null)
OnError("There was a problem connecting to Steam Trading.");
}
try
{
// fetch the other player's inventory
OtherItems = GetInventory (OtherSID);
if (OtherItems == null || OtherItems.success != "true")
{
throw new Exception ("Could not fetch other player's inventory via Trading!");
}
// fetch our inventory
MyItems = GetInventory (MeSID);
if (MyItems == null || MyItems.success != "true")
{
throw new Exception ("Could not fetch own inventory via Trading!");
}
// fetch other player's inventory from the Steam API.
OtherInventory = Inventory.FetchInventory(OtherSID.ConvertToUInt64(), apiKey);
if (OtherInventory == null)
{
throw new Exception ("Could not fetch other player's inventory via Steam API!");
}
// fetch our inventory from the Steam API.
MyInventory = Inventory.FetchInventory(MeSID.ConvertToUInt64(), apiKey);
if (MyInventory == null)
{
throw new Exception ("Could not fetch own inventory via Steam API!");
}
// check that the schema was already successfully fetched
if (CurrentSchema == null)
{
throw new Exception ("It seems the item schema was not fetched correctly!");
}
if (OnAfterInit != null)
OnAfterInit();
}
catch (Exception e)
{
if (OnError != null)
OnError ("I'm having a problem getting one of our backpacks. The Steam Community might be down. Ensure your backpack isn't private.");
Console.WriteLine (e);
}
}
public void Poll ()
{
if (!tradeStarted)
{
tradeStarted = true;
TradeStart = DateTime.Now;
LastAction = DateTime.Now;
}
StatusObj status = GetStatus ();
if (status.events != null && numEvents != status.events.Length)
{
int numLoops = status.events.Length - numEvents;
numEvents = status.events.Length;
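// Only the newly arrived events are processed: EventID walks them in
// chronological order, from the oldest unseen event up to the most recent one.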
for (int i = numLoops; i > 0; i--)
{
int EventID;
if (numLoops == 1)
{
EventID = numEvents - 1;
}
else
{
EventID = numEvents - i;
}
bool isBot = status.events [EventID].steamid == MeSID.ConvertToUInt64 ().ToString ();
/*
*
* Trade Action ID's
*
* 0 = Add item (itemid = "assetid")
* 1 = remove item (itemid = "assetid")
* 2 = Toggle ready
* 3 = Toggle not ready
* 4
* 5
* 6
* 7 = Chat (message = "text")
*
*/
ulong itemID;
switch (status.events [EventID].action)
{
case 0:
itemID = (ulong)status.events [EventID].assetid;
if (isBot)
MyOfferedItems.Add (itemID);
else
{
OtherOfferedItems.Add (itemID);
Inventory.Item item = OtherInventory.GetItem (itemID);
Schema.Item schemaItem = CurrentSchema.GetItem (item.Defindex);
OnUserAddItem (schemaItem, item);
}
break;
case 1:
itemID = (ulong)status.events [EventID].assetid;
if (isBot)
MyOfferedItems.Remove (itemID);
else
{
OtherOfferedItems.Remove (itemID);
Inventory.Item item = OtherInventory.GetItem (itemID);
Schema.Item schemaItem = CurrentSchema.GetItem (item.Defindex);
OnUserRemoveItem (schemaItem, item);
}
break;
case 2:
if (!isBot)
{
OtherReady = true;
OnUserSetReady (true);
}
break;
case 3:
if (!isBot)
{
OtherReady = false;
OnUserSetReady (false);
}
break;
case 4:
if (!isBot)
{
OnUserAccept ();
}
break;
case 7:
if (!isBot)
{
OnMessage (status.events [EventID].text);
}
break;
default:
PrintConsole ("Unknown Event ID: " + status.events [EventID].action, ConsoleColor.Red);
break;
}
if (!isBot)
LastAction = DateTime.Now;
}
} else {
// check if the user is AFK
var now = DateTime.Now;
DateTime actionTimeout = LastAction.AddSeconds (MaximumActionGap);
int untilActionTimeout = (int) Math.Round ((actionTimeout - now).TotalSeconds);
DateTime tradeTimeout = TradeStart.AddSeconds (MaximumTradeTime);
int untilTradeTimeout = (int) Math.Round ((tradeTimeout - now).TotalSeconds);
if (untilActionTimeout <= 0 || untilTradeTimeout <= 0)
{
if (OnTimeout != null)
{
OnTimeout();
}
CancelTrade();
}
else if (untilActionTimeout <= 15 && untilActionTimeout % 5 == 0)
{
SendMessage ("Are You AFK? The trade will be canceled in " + untilActionTimeout + " seconds if you don't do something.");
}
}
// Update Local Variables
if (status.them != null)
{
OtherReady = status.them.ready == 1;
MeReady = status.me.ready == 1;
}
// Update version
if (status.newversion)
{
version = status.version;
}
if (status.logpos != 0)
{
logpos = status.logpos;
}
}
#region Trade interaction
public string SendMessage (string msg)
{
var data = new NameValueCollection ();
data.Add ("sessionid", Uri.UnescapeDataString (sessionId));
data.Add ("message", msg);
data.Add ("logpos", "" + logpos);
data.Add ("version", "" + version);
return Fetch (baseTradeURL + "chat", "POST", data);
}
public bool AddItemByDefindex (int defindex, int slot)
{
List<Inventory.Item> items = MyInventory.GetItemsByDefindex (defindex);
if (items != null && items.Count > 0)
{
AddItem (items[0].Id, slot);
return true;
}
return false;
}
public void AddItem (ulong itemid, int slot)
{
var data = new NameValueCollection ();
data.Add ("sessionid", Uri.UnescapeDataString (sessionId));
data.Add ("appid", "440");
data.Add ("contextid", "2");
data.Add ("itemid", "" + itemid);
data.Add ("slot", "" + slot);
Fetch (baseTradeURL + "additem", "POST", data);
}
public void RemoveItem (ulong itemid, int slot)
{
var data = new NameValueCollection ();
data.Add ("sessionid", Uri.UnescapeDataString (sessionId));
data.Add ("appid", "440");
data.Add ("contextid", "2");
data.Add ("itemid", "" + itemid);
data.Add ("slot", "" + slot);
Fetch (baseTradeURL + "removeitem", "POST", data);
}
public void SetReady (bool ready)
{
var data = new NameValueCollection ();
data.Add ("sessionid", Uri.UnescapeDataString (sessionId));
data.Add ("ready", ready ? "true" : "false");
data.Add ("version", "" + version);
Fetch (baseTradeURL + "toggleready", "POST", data);
}
public dynamic AcceptTrade ()
{
var data = new NameValueCollection ();
data.Add ("sessionid", Uri.UnescapeDataString (sessionId));
data.Add ("version", "" + version);
string response = Fetch (baseTradeURL + "confirm", "POST", data);
return JsonConvert.DeserializeObject (response);
}
public void CancelTrade ()
{
var data = new NameValueCollection ();
data.Add ("sessionid", Uri.UnescapeDataString (sessionId));
Fetch (baseTradeURL + "cancel", "POST", data);
}
#endregion
public void AddListener (TradeListener listener)
{
OnError += listener.OnError;
OnTimeout += listener.OnTimeout;
OnAfterInit += listener.OnAfterInit;
OnUserAddItem += listener.OnUserAddItem;
OnUserRemoveItem += listener.OnUserRemoveItem;
OnMessage += listener.OnMessage;
OnUserSetReady += listener.OnUserSetReadyState;
OnUserAccept += listener.OnUserAccept;
listener.trade = this;
}
protected StatusObj GetStatus ()
{
var data = new NameValueCollection ();
data.Add ("sessionid", Uri.UnescapeDataString (sessionId));
data.Add ("logpos", "" + logpos);
data.Add ("version", "" + version);
string response = Fetch (baseTradeURL + "tradestatus", "POST", data);
return JsonConvert.DeserializeObject<StatusObj> (response);
}
protected dynamic GetInventory (SteamID steamid)
{
string url = String.Format (
"http://steamcommunity.com/profiles/{0}/inventory/json/440/2/?trading=1",
steamid.ConvertToUInt64 ()
);
try
{
string response = Fetch (url, "GET", null, false);
return JsonConvert.DeserializeObject (response);
}
catch (Exception)
{
return JsonConvert.DeserializeObject ("{\"success\":\"false\"}");
}
}
protected string Fetch (string url, string method, NameValueCollection data = null, bool sendLoginData = true)
{
var cookies = new CookieContainer();
if (sendLoginData)
{
cookies.Add (new Cookie ("sessionid", sessionId, String.Empty, SteamCommunityDomain));
cookies.Add (new Cookie ("steamLogin", steamLogin, String.Empty, SteamCommunityDomain));
}
return SteamWeb.Fetch (url, method, data, cookies);
}
public abstract class TradeListener
{
public Trade trade;
public abstract void OnError (string error);
public abstract void OnTimeout ();
public abstract void OnAfterInit ();
public abstract void OnUserAddItem (Schema.Item schemaItem, Inventory.Item inventoryItem);
public abstract void OnUserRemoveItem (Schema.Item schemaItem, Inventory.Item inventoryItem);
public abstract void OnMessage (string msg);
public abstract void OnUserSetReadyState (bool ready);
public abstract void OnUserAccept ();
}
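// A minimal sketch of a concrete listener (the class name is hypothetical); only the
// error and chat callbacks do real work here, the remaining overrides are no-ops.
//
//     public class ConsoleTradeListener : Trade.TradeListener
//     {
//         public override void OnError (string error) { Console.WriteLine ("Error: " + error); }
//         public override void OnTimeout () { }
//         public override void OnAfterInit () { }
//         public override void OnUserAddItem (Schema.Item schemaItem, Inventory.Item inventoryItem) { }
//         public override void OnUserRemoveItem (Schema.Item schemaItem, Inventory.Item inventoryItem) { }
//         public override void OnMessage (string msg) { Console.WriteLine ("Chat: " + msg); }
//         public override void OnUserSetReadyState (bool ready) { }
//         public override void OnUserAccept () { }
//     }
//
// An instance can be passed to the Trade constructor or wired later via AddListener.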
#region JSON classes
protected class StatusObj
{
public string error { get; set; }
public bool newversion { get; set; }
public bool success { get; set; }
public long trade_status { get; set; }
public int version { get; set; }
public int logpos { get; set; }
public TradeUserObj me { get; set; }
public TradeUserObj them { get; set; }
public TradeEvent[] events { get; set; }
}
protected class TradeEvent
{
public string steamid { get; set; }
public int action { get; set; }
public ulong timestamp { get; set; }
public int appid { get; set; }
public string text { get; set; }
public int contextid { get; set; }
public ulong assetid { get; set; }
}
protected class TradeUserObj
{
public int ready { get; set; }
public int confirmed { get; set; }
public int sec_since_touch { get; set; }
}
#endregion
}
}
| |
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using Microsoft.Win32.SafeHandles;
// ReSharper disable UnusedMember.Local
// ReSharper disable FieldCanBeMadeReadOnly.Local
// ReSharper disable InconsistentNaming
// ReSharper disable MemberCanBePrivate.Local
// ReSharper disable UnusedMember.Global
// Fetched from: http://www.codeproject.com/Articles/15633/Manipulating-NTFS-Junction-Points-in-NET
namespace erl.Oracle.TnsNames
{
/// <summary>
/// Provides access to NTFS junction points in .Net.
/// </summary>
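/// <example>
/// A minimal usage sketch (the paths below are hypothetical):
/// <code>
/// JunctionPoint.Create(@"C:\Temp\MyJunction", @"C:\Temp\Target", overwrite: true);
/// bool isJunction = JunctionPoint.Exists(@"C:\Temp\MyJunction");   // true
/// string target = JunctionPoint.GetTarget(@"C:\Temp\MyJunction");  // @"C:\Temp\Target"
/// JunctionPoint.Delete(@"C:\Temp\MyJunction");
/// </code>
/// </example>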
internal static class JunctionPoint
{
/// <summary>
/// The file or directory is not a reparse point.
/// </summary>
private const int ERROR_NOT_A_REPARSE_POINT = 4390;
/// <summary>
/// The reparse point attribute cannot be set because it conflicts with an existing attribute.
/// </summary>
private const int ERROR_REPARSE_ATTRIBUTE_CONFLICT = 4391;
/// <summary>
/// The data present in the reparse point buffer is invalid.
/// </summary>
private const int ERROR_INVALID_REPARSE_DATA = 4392;
/// <summary>
/// The tag present in the reparse point buffer is invalid.
/// </summary>
private const int ERROR_REPARSE_TAG_INVALID = 4393;
/// <summary>
/// There is a mismatch between the tag specified in the request and the tag present in the reparse point.
/// </summary>
private const int ERROR_REPARSE_TAG_MISMATCH = 4394;
/// <summary>
/// Command to set the reparse point data block.
/// </summary>
private const int FSCTL_SET_REPARSE_POINT = 0x000900A4;
/// <summary>
/// Command to get the reparse point data block.
/// </summary>
private const int FSCTL_GET_REPARSE_POINT = 0x000900A8;
/// <summary>
/// Command to delete the reparse point data block.
/// </summary>
private const int FSCTL_DELETE_REPARSE_POINT = 0x000900AC;
/// <summary>
/// Reparse point tag used to identify mount points and junction points.
/// </summary>
private const uint IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003;
/// <summary>
/// This prefix indicates to NTFS that the path is to be treated as a non-interpreted
/// path in the virtual file system.
/// </summary>
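/// <remarks>
/// For example, a target of "C:\Target" is stored in the substitute name as
/// "\??\C:\Target" (see how <see cref="Create"/> builds the reparse buffer).
/// </remarks>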
private const string NonInterpretedPathPrefix = @"\??\";
[Flags]
private enum EFileAccess : uint
{
GenericRead = 0x80000000,
GenericWrite = 0x40000000,
GenericExecute = 0x20000000,
GenericAll = 0x10000000,
}
[Flags]
private enum EFileShare : uint
{
None = 0x00000000,
Read = 0x00000001,
Write = 0x00000002,
Delete = 0x00000004,
}
private enum ECreationDisposition : uint
{
New = 1,
CreateAlways = 2,
OpenExisting = 3,
OpenAlways = 4,
TruncateExisting = 5,
}
[Flags]
private enum EFileAttributes : uint
{
Readonly = 0x00000001,
Hidden = 0x00000002,
System = 0x00000004,
Directory = 0x00000010,
Archive = 0x00000020,
Device = 0x00000040,
Normal = 0x00000080,
Temporary = 0x00000100,
SparseFile = 0x00000200,
ReparsePoint = 0x00000400,
Compressed = 0x00000800,
Offline = 0x00001000,
NotContentIndexed = 0x00002000,
Encrypted = 0x00004000,
Write_Through = 0x80000000,
Overlapped = 0x40000000,
NoBuffering = 0x20000000,
RandomAccess = 0x10000000,
SequentialScan = 0x08000000,
DeleteOnClose = 0x04000000,
BackupSemantics = 0x02000000,
PosixSemantics = 0x01000000,
OpenReparsePoint = 0x00200000,
OpenNoRecall = 0x00100000,
FirstPipeInstance = 0x00080000
}
[StructLayout(LayoutKind.Sequential)]
private struct REPARSE_DATA_BUFFER
{
/// <summary>
/// Reparse point tag. Must be a Microsoft reparse point tag.
/// </summary>
public uint ReparseTag;
/// <summary>
/// Size, in bytes, of the data after the Reserved member. This can be calculated by:
/// (4 * sizeof(ushort)) + SubstituteNameLength + PrintNameLength +
/// (namesAreNullTerminated ? 2 * sizeof(char) : 0);
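/// For example, <see cref="Create"/> writes a null-terminated substitute name and an
/// empty print name, so ReparseDataLength becomes 8 + SubstituteNameLength + 0 + 4,
/// i.e. SubstituteNameLength + 12.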
/// </summary>
public ushort ReparseDataLength;
/// <summary>
/// Reserved; do not use.
/// </summary>
public ushort Reserved;
/// <summary>
/// Offset, in bytes, of the substitute name string in the PathBuffer array.
/// </summary>
public ushort SubstituteNameOffset;
/// <summary>
/// Length, in bytes, of the substitute name string. If this string is null-terminated,
/// SubstituteNameLength does not include space for the null character.
/// </summary>
public ushort SubstituteNameLength;
/// <summary>
/// Offset, in bytes, of the print name string in the PathBuffer array.
/// </summary>
public ushort PrintNameOffset;
/// <summary>
/// Length, in bytes, of the print name string. If this string is null-terminated,
/// PrintNameLength does not include space for the null character.
/// </summary>
public ushort PrintNameLength;
/// <summary>
/// A buffer containing the unicode-encoded path string. The path string contains
/// the substitute name string and print name string.
/// </summary>
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 0x3FF0)]
public byte[] PathBuffer;
}
[DllImport("kernel32.dll", CharSet = CharSet.Ansi, SetLastError = true)]
private static extern bool DeviceIoControl(IntPtr hDevice, uint dwIoControlCode,
IntPtr InBuffer, int nInBufferSize,
IntPtr OutBuffer, int nOutBufferSize,
out int pBytesReturned, IntPtr lpOverlapped);
[DllImport("kernel32.dll", SetLastError = true)]
private static extern IntPtr CreateFile(
string lpFileName,
EFileAccess dwDesiredAccess,
EFileShare dwShareMode,
IntPtr lpSecurityAttributes,
ECreationDisposition dwCreationDisposition,
EFileAttributes dwFlagsAndAttributes,
IntPtr hTemplateFile);
/// <summary>
/// Creates a junction point from the specified directory to the specified target directory.
/// </summary>
/// <remarks>
/// Only works on NTFS.
/// </remarks>
/// <param name="junctionPoint">The junction point path</param>
/// <param name="targetDir">The target directory</param>
/// <param name="overwrite">If true overwrites an existing reparse point or empty directory</param>
/// <exception cref="IOException">Thrown when the junction point could not be created or when
/// an existing directory was found and <paramref name="overwrite" /> is false</exception>
public static void Create(string junctionPoint, string targetDir, bool overwrite)
{
targetDir = Path.GetFullPath(targetDir);
if (!Directory.Exists(targetDir))
throw new IOException("Target path does not exist or is not a directory.");
if (Directory.Exists(junctionPoint))
{
if (!overwrite)
throw new IOException("Directory already exists and overwrite parameter is false.");
}
else
{
Directory.CreateDirectory(junctionPoint);
}
using (SafeFileHandle handle = OpenReparsePoint(junctionPoint, EFileAccess.GenericWrite))
{
byte[] targetDirBytes = Encoding.Unicode.GetBytes(NonInterpretedPathPrefix + Path.GetFullPath(targetDir));
REPARSE_DATA_BUFFER reparseDataBuffer = new REPARSE_DATA_BUFFER
{
ReparseTag = IO_REPARSE_TAG_MOUNT_POINT,
ReparseDataLength = (ushort) (targetDirBytes.Length + 12),
SubstituteNameOffset = 0,
SubstituteNameLength = (ushort) targetDirBytes.Length,
PrintNameOffset = (ushort) (targetDirBytes.Length + 2),
PrintNameLength = 0,
PathBuffer = new byte[0x3ff0]
};
Array.Copy(targetDirBytes, reparseDataBuffer.PathBuffer, targetDirBytes.Length);
int inBufferSize = Marshal.SizeOf(reparseDataBuffer);
IntPtr inBuffer = Marshal.AllocHGlobal(inBufferSize);
try
{
Marshal.StructureToPtr(reparseDataBuffer, inBuffer, false);
int bytesReturned;
bool result = DeviceIoControl(handle.DangerousGetHandle(), FSCTL_SET_REPARSE_POINT,
inBuffer, targetDirBytes.Length + 20, IntPtr.Zero, 0, out bytesReturned, IntPtr.Zero);
if (!result)
ThrowLastWin32Error("Unable to create junction point.");
}
finally
{
Marshal.FreeHGlobal(inBuffer);
}
}
}
/// <summary>
/// Deletes a junction point at the specified source directory along with the directory itself.
/// Does nothing if the junction point does not exist.
/// </summary>
/// <remarks>
/// Only works on NTFS.
/// </remarks>
/// <param name="junctionPoint">The junction point path</param>
public static void Delete(string junctionPoint)
{
if (!Directory.Exists(junctionPoint))
{
if (File.Exists(junctionPoint))
throw new IOException("Path is not a junction point.");
return;
}
using (SafeFileHandle handle = OpenReparsePoint(junctionPoint, EFileAccess.GenericWrite))
{
REPARSE_DATA_BUFFER reparseDataBuffer = new REPARSE_DATA_BUFFER
{
ReparseTag = IO_REPARSE_TAG_MOUNT_POINT,
ReparseDataLength = 0,
PathBuffer = new byte[0x3ff0]
};
int inBufferSize = Marshal.SizeOf(reparseDataBuffer);
IntPtr inBuffer = Marshal.AllocHGlobal(inBufferSize);
try
{
Marshal.StructureToPtr(reparseDataBuffer, inBuffer, false);
int bytesReturned;
bool result = DeviceIoControl(handle.DangerousGetHandle(), FSCTL_DELETE_REPARSE_POINT,
inBuffer, 8, IntPtr.Zero, 0, out bytesReturned, IntPtr.Zero);
if (!result)
ThrowLastWin32Error("Unable to delete junction point.");
}
finally
{
Marshal.FreeHGlobal(inBuffer);
}
try
{
Directory.Delete(junctionPoint);
}
catch (IOException ex)
{
throw new IOException("Unable to delete junction point.", ex);
}
}
}
/// <summary>
/// Determines whether the specified path exists and refers to a junction point.
/// </summary>
/// <param name="path">The junction point path</param>
/// <returns>True if the specified path represents a junction point</returns>
/// <exception cref="IOException">Thrown if the specified path is invalid
/// or some other error occurs</exception>
public static bool Exists(string path)
{
if (! Directory.Exists(path))
return false;
using (SafeFileHandle handle = OpenReparsePoint(path, EFileAccess.GenericRead))
{
string target = InternalGetTarget(handle);
return target != null;
}
}
/// <summary>
/// Gets the target of the specified junction point.
/// </summary>
/// <remarks>
/// Only works on NTFS.
/// </remarks>
/// <param name="junctionPoint">The junction point path</param>
/// <returns>The target of the junction point</returns>
/// <exception cref="IOException">Thrown when the specified path does not
/// exist, is invalid, is not a junction point, or some other error occurs</exception>
public static string GetTarget(string junctionPoint)
{
using (SafeFileHandle handle = OpenReparsePoint(junctionPoint, EFileAccess.GenericRead))
{
string target = InternalGetTarget(handle);
if (target == null)
throw new IOException("Path is not a junction point.");
return target;
}
}
private static string InternalGetTarget(SafeFileHandle handle)
{
int outBufferSize = Marshal.SizeOf<REPARSE_DATA_BUFFER>();
IntPtr outBuffer = Marshal.AllocHGlobal(outBufferSize);
try
{
int bytesReturned;
bool result = DeviceIoControl(handle.DangerousGetHandle(), FSCTL_GET_REPARSE_POINT,
IntPtr.Zero, 0, outBuffer, outBufferSize, out bytesReturned, IntPtr.Zero);
if (!result)
{
int error = Marshal.GetLastWin32Error();
if (error == ERROR_NOT_A_REPARSE_POINT)
return null;
ThrowLastWin32Error("Unable to get information about junction point.");
}
var reparseDataBuffer = Marshal.PtrToStructure<REPARSE_DATA_BUFFER>(outBuffer);
if (reparseDataBuffer.ReparseTag != IO_REPARSE_TAG_MOUNT_POINT)
return null;
string targetDir = Encoding.Unicode.GetString(reparseDataBuffer.PathBuffer,
reparseDataBuffer.SubstituteNameOffset, reparseDataBuffer.SubstituteNameLength);
if (targetDir.StartsWith(NonInterpretedPathPrefix))
targetDir = targetDir.Substring(NonInterpretedPathPrefix.Length);
return targetDir;
}
finally
{
Marshal.FreeHGlobal(outBuffer);
}
}
private static SafeFileHandle OpenReparsePoint(string reparsePoint, EFileAccess accessMode)
{
SafeFileHandle reparsePointHandle = new SafeFileHandle(CreateFile(reparsePoint, accessMode,
EFileShare.Read | EFileShare.Write | EFileShare.Delete,
IntPtr.Zero, ECreationDisposition.OpenExisting,
EFileAttributes.BackupSemantics | EFileAttributes.OpenReparsePoint, IntPtr.Zero), true);
if (Marshal.GetLastWin32Error() != 0)
ThrowLastWin32Error("Unable to open reparse point.");
return reparsePointHandle;
}
private static void ThrowLastWin32Error(string message)
{
throw new IOException(message, Marshal.GetExceptionForHR(Marshal.GetHRForLastWin32Error()));
}
}
}
| |
using System;
using System.Diagnostics;
using System.Collections.Specialized;
using System.IO;
using System.Reflection;
using System.Windows.Forms;
using System.Configuration.Install;
using System.ComponentModel;
using Microsoft.Win32;
using System.Runtime.InteropServices;
using Comzept.Genesis.Licensing;
namespace Comzept.Genesis.Licensing.Installer
{
[RunInstaller(true)]
public partial class NetrixInstaller : System.Configuration.Install.Installer
{
public NetrixInstaller()
{
InitializeComponent();
}
private string InstallPath = "";
private string AssemblyName = "";
private string ControlName = "";
private bool envDte=true;
public override void Install(System.Collections.IDictionary stateSaver)
{
string strAssemblyFiles = "";
try
{
AssemblyName = Context.Parameters["AssemblyName"];
ControlName = Context.Parameters["ControlName"];
InstallPath = Context.Parameters["FullPath"];
// Add assemblies to GAC
strAssemblyFiles = base.Context.Parameters["gacname"].ToString();
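// "gacname" is expected to be a '|'-separated list of "assemblyFile*typeName"
// entries, as consumed by the Split calls below.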
if (strAssemblyFiles.Length > 0)
{
System.EnterpriseServices.Internal.Publish publish = new System.EnterpriseServices.Internal.Publish();
bool GacInstall = (Context.Parameters["CHECKBOXGAC"].ToString() == "1");
InstallerSupport.Instance.SetNetRixRegistryValue((GacInstall) ? "1" : "0", "GAC");
foreach (string strAssemblyFile in strAssemblyFiles.Split('|'))
{
try
{
string[] strAssembly = strAssemblyFile.Split('*');
if (strAssembly[0].EndsWith(".Core.dll"))
{
//publish.GacInstall(Path.Combine(Path.Combine(InstallPath, "Control"), strAssembly[0]));
InstallerSupport.Instance.SetNetRixRegistryValue(InstallPath, InstallerSupport.GetProperty("InstallPathKey"));
}
if (GacInstall)
{
publish.GacInstall(Path.Combine(Path.Combine(InstallPath, "Control"), strAssembly[0]));
//MessageBox.Show("Gac installed");
}
// MessageBox.Show(strAssembly[0] + " : " + strAssembly[1]);
if (!(strAssembly[0].EndsWith(".Core.dll")))
{
Type type = GetEditorType(strAssembly[0], strAssembly[1], GacInstall);
if (type == null)
{
continue;
}
else
{
InstallerSupport.Instance.SetLICFileContent(null, "", type, "Features", false);
}
}
}
catch
{
continue;
}
}
}
// Toolbox
if (Context.Parameters["CHECKBOXTOOLBOX"].ToString() == "1")
{
try
{
AddRemoveItems AddRemoveItemsDlg = new AddRemoveItems(strAssemblyFiles);
AddRemoveItemsDlg.InstallPath = InstallPath;
AddRemoveItemsDlg.ShowDialog();
}
catch
{
envDte=false;
}
}
// Check for license condition
//if (Context.Parameters["CHECKBOXLICENSE"].ToString() == "1")
//{
// LicenseManager licenseManager = new LicenseManager();
// licenseManager.ShowDialog();
// // LicenseStorage.LaunchProg();
//}
} // try
catch (Exception ex)
{
if (envDte)
MessageBox.Show(ex.Message + "\n\n" + ex.StackTrace, "Error running Installer");
throw new InstallException(ex.Message, ex); //rethrow
}
base.Install(stateSaver);
}
public override void Uninstall(System.Collections.IDictionary savedState)
{
//#if DEBUG
// MessageBox.Show("Uninstalling");
//#endif
RegistryKey rk = Registry.CurrentUser.OpenSubKey(InstallerSupport.GetProperty("RegKey"), true);
AssemblyName = Context.Parameters["AssemblyName"];
string InstallPath = rk.GetValue(InstallerSupport.GetProperty("InstallPathKey")).ToString();
string strAssemblyFiles = base.Context.Parameters["gacname"].ToString();
try
{
base.Uninstall(savedState);
// remove from GAC
if (strAssemblyFiles.Length > 0)
{
System.EnterpriseServices.Internal.Publish publish = new System.EnterpriseServices.Internal.Publish();
foreach (string strAssemblyFile in strAssemblyFiles.Split('|'))
{
try
{
string[] strAssembly = strAssemblyFile.Split('*');
// if (!strAssembly[0].EndsWith(".Core.dll"))
//{
publish.GacRemove(Path.Combine(Path.Combine(InstallPath, "Control"), strAssembly[0]));
//}
}
catch { }
}
}
}
catch { }
try
{
// toolbox
AddRemoveItems AddRemoveItemsDlg = new AddRemoveItems(strAssemblyFiles);
AddRemoveItemsDlg.RemoveItem();
// license
if (strAssemblyFiles.Contains("Core.dll"))
{
rk.DeleteValue("GAC");
rk.DeleteValue("InstallPath");
}
}
catch { }
// rk.DeleteSubKeyTree("Features");
rk.Close();
// } catch {
// }
}
private Type GetEditorType(string assembly, string type, bool gacInstall)
{
string full = "";
Assembly a;
if (!gacInstall)
{
if (InstallPath == null || InstallPath.Length == 0)
{
full = Path.Combine(Path.GetDirectoryName(this.GetType().Assembly.Location), assembly);
}
else
{
full = Path.Combine(Path.Combine(InstallPath, "Control"), assembly);
}
// MessageBox.Show(full, "FullPath");
a = Assembly.LoadFile(full);
}
else
{
a = System.Reflection.Assembly.LoadWithPartialName(assembly.Substring(0, assembly.Length - 4));
}
#if DEBUG
MessageBox.Show(type);
#endif
#if DEBUG
// MessageBox.Show(full, "Assembly.LoadFile");
#endif
if (a != null)
return a.GetType(type, true, true);
return null;
}
protected override void OnAfterInstall(System.Collections.IDictionary savedState)
{
try
{
if (Context.Parameters["CHECKBOXLICENSE"].ToString() == "1")
{
Process.Start(Path.Combine(InstallPath, "LicenseManagerPro.exe"));
}
}
catch
{
}
base.OnAfterInstall(savedState);
}
public override void Commit(System.Collections.IDictionary savedState)
{
MessageBox.Show("commit");
base.Commit(savedState);
//if (Context.Parameters["CHECKBOXLICENSE"].ToString() == "1")
//{
// LicenseManager licenseManager = new LicenseManager();
// licenseManager.ShowDialog();
#if DEBUG
MessageBox.Show("Committing");
#endif
}
public override void Rollback(System.Collections.IDictionary savedState)
{
RegistryKey rk = Registry.CurrentUser.OpenSubKey(InstallerSupport.GetProperty("RegKey"));
AssemblyName = Context.Parameters["AssemblyName"];
string InstallPath = rk.GetValue(InstallerSupport.GetProperty("InstallPath")).ToString();
string strAssemblyFiles = base.Context.Parameters["gacname"].ToString();
try
{
base.Rollback(savedState);
// remove from GAC
if (strAssemblyFiles.Length > 0)
{
System.EnterpriseServices.Internal.Publish publish = new System.EnterpriseServices.Internal.Publish();
foreach (string strAssemblyFile in strAssemblyFiles.Split('|'))
{
try
{
string[] strAssembly = strAssemblyFile.Split('*');
publish.GacRemove(Path.Combine(Path.Combine(InstallPath, "Control"), strAssembly[0]));
}
catch { }
}
}
}
catch { }
// toolbox
AddRemoveItems AddRemoveItemsDlg = new AddRemoveItems(strAssemblyFiles);
AddRemoveItemsDlg.RemoveItem();
// license
rk.DeleteValue("GAC");
rk.DeleteValue("InstallPath");
// rk.DeleteSubKeyTree("Features");
rk.Close();
#if DEBUG
MessageBox.Show("Rolling back");
#endif
}
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using NUnit.Framework;
using Python.Runtime;
using QuantConnect.Algorithm;
using QuantConnect.Data;
using QuantConnect.Indicators;
using QuantConnect.Tests.Engine.DataFeeds;
namespace QuantConnect.Tests.Indicators
{
[TestFixture]
public class PythonIndicatorTests : CommonIndicatorTests<IBaseData>
{
protected override IndicatorBase<IBaseData> CreateIndicator()
{
using (Py.GIL())
{
var module = PythonEngine.ModuleFromString(
Guid.NewGuid().ToString(),
@"
from AlgorithmImports import *
from collections import deque
class CustomSimpleMovingAverage(PythonIndicator):
def __init__(self, name, period):
self.Name = name
self.Value = 0
self.queue = deque(maxlen=period)
# Update method is mandatory
def Update(self, input):
self.queue.appendleft(input.Value)
count = len(self.queue)
self.Value = np.sum(self.queue) / count
return count == self.queue.maxlen
"
);
var indicator = module.GetAttr("CustomSimpleMovingAverage")
.Invoke("custom".ToPython(), 14.ToPython());
return new PythonIndicator(indicator);
}
}
protected override string TestFileName => "spy_with_indicators.txt";
protected override string TestColumnName => "SMA14";
protected override void RunTestIndicator(IndicatorBase<IBaseData> indicator)
{
var first = true;
var closeIndex = -1;
var targetIndex = -1;
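// The test file is a CSV with a header row: locate the "Close" and target columns,
// feed each Close value into the indicator, then compare against the expected column.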
foreach (var line in File.ReadLines(Path.Combine("TestData", TestFileName)))
{
var parts = line.Split(new[] { ',' }, StringSplitOptions.None);
if (first)
{
first = false;
for (var i = 0; i < parts.Length; i++)
{
if (parts[i].Trim() == "Close")
{
closeIndex = i;
}
if (parts[i].Trim() == TestColumnName)
{
targetIndex = i;
}
}
if (closeIndex * targetIndex < 0)
{
Assert.Fail($"Didn't find one of 'Close' or '{line}' in the header: ", TestColumnName);
}
continue;
}
var close = decimal.Parse(parts[closeIndex], CultureInfo.InvariantCulture);
var date = Time.ParseDate(parts[0]);
var data = new IndicatorDataPoint(date, close);
indicator.Update(data);
if (!indicator.IsReady || parts[targetIndex].Trim() == string.Empty)
{
continue;
}
var expected = double.Parse(parts[targetIndex], CultureInfo.InvariantCulture);
Assertion.Invoke(indicator, expected);
}
}
protected override Action<IndicatorBase<IBaseData>, double> Assertion => (indicator, expected) =>
Assert.AreEqual(expected, (double) indicator.Current.Value, 1e-2);
[Test]
public void SmaComputesCorrectly()
{
var sma = new SimpleMovingAverage(4);
var data = new[] {1m, 10m, 100m, 1000m, 10000m, 1234m, 56789m};
var seen = new List<decimal>();
for (int i = 0; i < data.Length; i++)
{
var datum = data[i];
seen.Add(datum);
sma.Update(new IndicatorDataPoint(DateTime.Now.AddSeconds(i), datum));
Assert.AreEqual(Enumerable.Reverse(seen).Take(sma.Period).Average(), sma.Current.Value);
}
}
[Test]
public void IsReadyAfterPeriodUpdates()
{
var sma = new SimpleMovingAverage(3);
sma.Update(DateTime.UtcNow, 1m);
sma.Update(DateTime.UtcNow, 1m);
Assert.IsFalse(sma.IsReady);
sma.Update(DateTime.UtcNow, 1m);
Assert.IsTrue(sma.IsReady);
}
[Test]
public override void ResetsProperly()
{
var sma = new SimpleMovingAverage(3);
foreach (var data in TestHelper.GetDataStream(4))
{
sma.Update(data);
}
Assert.IsTrue(sma.IsReady);
sma.Reset();
TestHelper.AssertIndicatorIsInDefaultState(sma);
TestHelper.AssertIndicatorIsInDefaultState(sma.RollingSum);
sma.Update(DateTime.UtcNow, 2.0m);
Assert.AreEqual(sma.Current.Value, 2.0m);
}
[Test]
public void RegisterPythonCustomIndicatorProperly()
{
var algorithm = new QCAlgorithm();
algorithm.SubscriptionManager.SetDataManager(new DataManagerStub(algorithm));
var spy = algorithm.AddEquity("SPY").Symbol;
using (Py.GIL())
{
var module = PythonEngine.ModuleFromString(
Guid.NewGuid().ToString(),
@"
from AlgorithmImports import *
class GoodCustomIndicator(PythonIndicator):
def __init__(self):
self.Value = 0
def Update(self, input):
self.Value = input.Value
return True
class BadCustomIndicator(PythonIndicator):
def __init__(self):
self.Valeu = 0
def Update(self, input):
self.Value = input.Value
return True"
);
var goodIndicator = module.GetAttr("GoodCustomIndicator").Invoke();
Assert.DoesNotThrow(() => algorithm.RegisterIndicator(spy, goodIndicator, Resolution.Minute));
var actual = algorithm.SubscriptionManager.Subscriptions
.FirstOrDefault(config => config.TickType == TickType.Trade)
.Consolidators.Count;
Assert.AreEqual(1, actual);
var badIndicator = module.GetAttr("BadCustomIndicator").Invoke();
Assert.Throws<NotImplementedException>(() => algorithm.RegisterIndicator(spy, badIndicator, Resolution.Minute));
}
}
[Test]
public void AllPythonRegisterIndicatorCases()
{
//This test covers all three cases of registering an indicator through Python
//Setup algorithm and Equity
var algorithm = new QCAlgorithm();
algorithm.SubscriptionManager.SetDataManager(new DataManagerStub(algorithm));
var spy = algorithm.AddEquity("SPY").Symbol;
//Setup Python Indicator and Consolidator
using (Py.GIL())
{
var module = PythonEngine.ModuleFromString(Guid.NewGuid().ToString(),
"from AlgorithmImports import *\n" +
"consolidator = QuoteBarConsolidator(timedelta(days = 5)) \n" +
"timeDelta = timedelta(days=2)\n" +
"class CustomIndicator(PythonIndicator):\n" +
" def __init__(self):\n" +
" self.Value = 0\n" +
" def Update(self, input):\n" +
" self.Value = input.Value\n" +
" return True\n" +
"class CustomConsolidator(PythonConsolidator):\n" +
" def __init__(self):\n" +
" self.InputType = QuoteBar\n" +
" self.OutputType = QuoteBar\n" +
" self.Consolidated = None\n" +
" self.WorkingData = None\n"
);
//Get our variables from Python
var PyIndicator = module.GetAttr("CustomIndicator").Invoke();
var PyConsolidator = module.GetAttr("CustomConsolidator").Invoke();
var Consolidator = module.GetAttr("consolidator");
algorithm.SubscriptionManager.AddConsolidator(spy, Consolidator);
var TimeDelta = module.GetAttr("timeDelta");
//Test 1: Using a C# Consolidator; Should convert consolidator into IDataConsolidator
Assert.DoesNotThrow(() => algorithm.RegisterIndicator(spy, PyIndicator, Consolidator));
//Test 2: Using a Python Consolidator; Should wrap consolidator
Assert.DoesNotThrow(() => algorithm.RegisterIndicator(spy, PyIndicator, PyConsolidator));
//Test 3: Using a timedelta object; Should convert timedelta to timespan
Assert.DoesNotThrow(() => algorithm.RegisterIndicator(spy, PyIndicator, TimeDelta));
}
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/bigtable/v1/bigtable_service.proto
// Original file comments:
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#region Designer generated code
using System;
using System.Threading;
using System.Threading.Tasks;
using Grpc.Core;
namespace Google.Bigtable.V1 {
/// <summary>
/// Service for reading from and writing to existing Bigtables.
/// </summary>
public static class BigtableService
{
static readonly string __ServiceName = "google.bigtable.v1.BigtableService";
static readonly Marshaller<global::Google.Bigtable.V1.ReadRowsRequest> __Marshaller_ReadRowsRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.ReadRowsRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.ReadRowsResponse> __Marshaller_ReadRowsResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.ReadRowsResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.SampleRowKeysRequest> __Marshaller_SampleRowKeysRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.SampleRowKeysRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.SampleRowKeysResponse> __Marshaller_SampleRowKeysResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.SampleRowKeysResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.MutateRowRequest> __Marshaller_MutateRowRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.MutateRowRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Protobuf.WellKnownTypes.Empty> __Marshaller_Empty = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Protobuf.WellKnownTypes.Empty.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.MutateRowsRequest> __Marshaller_MutateRowsRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.MutateRowsRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.MutateRowsResponse> __Marshaller_MutateRowsResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.MutateRowsResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.CheckAndMutateRowRequest> __Marshaller_CheckAndMutateRowRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.CheckAndMutateRowRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.CheckAndMutateRowResponse> __Marshaller_CheckAndMutateRowResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.CheckAndMutateRowResponse.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.ReadModifyWriteRowRequest> __Marshaller_ReadModifyWriteRowRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.ReadModifyWriteRowRequest.Parser.ParseFrom);
static readonly Marshaller<global::Google.Bigtable.V1.Row> __Marshaller_Row = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Bigtable.V1.Row.Parser.ParseFrom);
static readonly Method<global::Google.Bigtable.V1.ReadRowsRequest, global::Google.Bigtable.V1.ReadRowsResponse> __Method_ReadRows = new Method<global::Google.Bigtable.V1.ReadRowsRequest, global::Google.Bigtable.V1.ReadRowsResponse>(
MethodType.ServerStreaming,
__ServiceName,
"ReadRows",
__Marshaller_ReadRowsRequest,
__Marshaller_ReadRowsResponse);
static readonly Method<global::Google.Bigtable.V1.SampleRowKeysRequest, global::Google.Bigtable.V1.SampleRowKeysResponse> __Method_SampleRowKeys = new Method<global::Google.Bigtable.V1.SampleRowKeysRequest, global::Google.Bigtable.V1.SampleRowKeysResponse>(
MethodType.ServerStreaming,
__ServiceName,
"SampleRowKeys",
__Marshaller_SampleRowKeysRequest,
__Marshaller_SampleRowKeysResponse);
static readonly Method<global::Google.Bigtable.V1.MutateRowRequest, global::Google.Protobuf.WellKnownTypes.Empty> __Method_MutateRow = new Method<global::Google.Bigtable.V1.MutateRowRequest, global::Google.Protobuf.WellKnownTypes.Empty>(
MethodType.Unary,
__ServiceName,
"MutateRow",
__Marshaller_MutateRowRequest,
__Marshaller_Empty);
static readonly Method<global::Google.Bigtable.V1.MutateRowsRequest, global::Google.Bigtable.V1.MutateRowsResponse> __Method_MutateRows = new Method<global::Google.Bigtable.V1.MutateRowsRequest, global::Google.Bigtable.V1.MutateRowsResponse>(
MethodType.Unary,
__ServiceName,
"MutateRows",
__Marshaller_MutateRowsRequest,
__Marshaller_MutateRowsResponse);
static readonly Method<global::Google.Bigtable.V1.CheckAndMutateRowRequest, global::Google.Bigtable.V1.CheckAndMutateRowResponse> __Method_CheckAndMutateRow = new Method<global::Google.Bigtable.V1.CheckAndMutateRowRequest, global::Google.Bigtable.V1.CheckAndMutateRowResponse>(
MethodType.Unary,
__ServiceName,
"CheckAndMutateRow",
__Marshaller_CheckAndMutateRowRequest,
__Marshaller_CheckAndMutateRowResponse);
static readonly Method<global::Google.Bigtable.V1.ReadModifyWriteRowRequest, global::Google.Bigtable.V1.Row> __Method_ReadModifyWriteRow = new Method<global::Google.Bigtable.V1.ReadModifyWriteRowRequest, global::Google.Bigtable.V1.Row>(
MethodType.Unary,
__ServiceName,
"ReadModifyWriteRow",
__Marshaller_ReadModifyWriteRowRequest,
__Marshaller_Row);
/// <summary>Service descriptor</summary>
public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
{
get { return global::Google.Bigtable.V1.BigtableServiceReflection.Descriptor.Services[0]; }
}
/// <summary>Base class for server-side implementations of BigtableService</summary>
public abstract class BigtableServiceBase
{
/// <summary>
/// Streams back the contents of all requested rows, optionally applying
/// the same Reader filter to each. Depending on their size, rows may be
/// broken up across multiple responses, but atomicity of each row will still
/// be preserved.
/// </summary>
public virtual global::System.Threading.Tasks.Task ReadRows(global::Google.Bigtable.V1.ReadRowsRequest request, IServerStreamWriter<global::Google.Bigtable.V1.ReadRowsResponse> responseStream, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
/// <summary>
/// Returns a sample of row keys in the table. The returned row keys will
/// delimit contiguous sections of the table of approximately equal size,
/// which can be used to break up the data for distributed tasks like
/// mapreduces.
/// </summary>
public virtual global::System.Threading.Tasks.Task SampleRowKeys(global::Google.Bigtable.V1.SampleRowKeysRequest request, IServerStreamWriter<global::Google.Bigtable.V1.SampleRowKeysResponse> responseStream, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
/// <summary>
/// Mutates a row atomically. Cells already present in the row are left
/// unchanged unless explicitly changed by 'mutation'.
/// </summary>
public virtual global::System.Threading.Tasks.Task<global::Google.Protobuf.WellKnownTypes.Empty> MutateRow(global::Google.Bigtable.V1.MutateRowRequest request, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
/// <summary>
/// Mutates multiple rows in a batch. Each individual row is mutated
/// atomically as in MutateRow, but the entire batch is not executed
/// atomically.
/// </summary>
public virtual global::System.Threading.Tasks.Task<global::Google.Bigtable.V1.MutateRowsResponse> MutateRows(global::Google.Bigtable.V1.MutateRowsRequest request, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
/// <summary>
/// Mutates a row atomically based on the output of a predicate Reader filter.
/// </summary>
public virtual global::System.Threading.Tasks.Task<global::Google.Bigtable.V1.CheckAndMutateRowResponse> CheckAndMutateRow(global::Google.Bigtable.V1.CheckAndMutateRowRequest request, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
/// <summary>
/// Modifies a row atomically, reading the latest existing timestamp/value from
/// the specified columns and writing a new value at
/// max(existing timestamp, current server time) based on pre-defined
/// read/modify/write rules. Returns the new contents of all modified cells.
/// </summary>
public virtual global::System.Threading.Tasks.Task<global::Google.Bigtable.V1.Row> ReadModifyWriteRow(global::Google.Bigtable.V1.ReadModifyWriteRowRequest request, ServerCallContext context)
{
throw new RpcException(new Status(StatusCode.Unimplemented, ""));
}
}
/// <summary>Client for BigtableService</summary>
public class BigtableServiceClient : ClientBase<BigtableServiceClient>
{
/// <summary>Creates a new client for BigtableService</summary>
/// <param name="channel">The channel to use to make remote calls.</param>
public BigtableServiceClient(Channel channel) : base(channel)
{
}
/// <summary>Creates a new client for BigtableService that uses a custom <c>CallInvoker</c>.</summary>
/// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
public BigtableServiceClient(CallInvoker callInvoker) : base(callInvoker)
{
}
/// <summary>Protected parameterless constructor to allow creation of test doubles.</summary>
protected BigtableServiceClient() : base()
{
}
/// <summary>Protected constructor to allow creation of configured clients.</summary>
/// <param name="configuration">The client configuration.</param>
protected BigtableServiceClient(ClientBaseConfiguration configuration) : base(configuration)
{
}
/// <summary>
/// Streams back the contents of all requested rows, optionally applying
/// the same Reader filter to each. Depending on their size, rows may be
/// broken up across multiple responses, but atomicity of each row will still
/// be preserved.
/// </summary>
public virtual AsyncServerStreamingCall<global::Google.Bigtable.V1.ReadRowsResponse> ReadRows(global::Google.Bigtable.V1.ReadRowsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return ReadRows(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Streams back the contents of all requested rows, optionally applying
/// the same Reader filter to each. Depending on their size, rows may be
/// broken up across multiple responses, but atomicity of each row will still
/// be preserved.
/// </summary>
public virtual AsyncServerStreamingCall<global::Google.Bigtable.V1.ReadRowsResponse> ReadRows(global::Google.Bigtable.V1.ReadRowsRequest request, CallOptions options)
{
return CallInvoker.AsyncServerStreamingCall(__Method_ReadRows, null, options, request);
}
/// <summary>
/// Returns a sample of row keys in the table. The returned row keys will
/// delimit contiguous sections of the table of approximately equal size,
/// which can be used to break up the data for distributed tasks like
/// mapreduces.
/// </summary>
public virtual AsyncServerStreamingCall<global::Google.Bigtable.V1.SampleRowKeysResponse> SampleRowKeys(global::Google.Bigtable.V1.SampleRowKeysRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return SampleRowKeys(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Returns a sample of row keys in the table. The returned row keys will
/// delimit contiguous sections of the table of approximately equal size,
/// which can be used to break up the data for distributed tasks like
/// mapreduces.
/// </summary>
public virtual AsyncServerStreamingCall<global::Google.Bigtable.V1.SampleRowKeysResponse> SampleRowKeys(global::Google.Bigtable.V1.SampleRowKeysRequest request, CallOptions options)
{
return CallInvoker.AsyncServerStreamingCall(__Method_SampleRowKeys, null, options, request);
}
/// <summary>
/// Mutates a row atomically. Cells already present in the row are left
/// unchanged unless explicitly changed by 'mutation'.
/// </summary>
public virtual global::Google.Protobuf.WellKnownTypes.Empty MutateRow(global::Google.Bigtable.V1.MutateRowRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return MutateRow(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Mutates a row atomically. Cells already present in the row are left
/// unchanged unless explicitly changed by 'mutation'.
/// </summary>
public virtual global::Google.Protobuf.WellKnownTypes.Empty MutateRow(global::Google.Bigtable.V1.MutateRowRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_MutateRow, null, options, request);
}
/// <summary>
/// Mutates a row atomically. Cells already present in the row are left
/// unchanged unless explicitly changed by 'mutation'.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> MutateRowAsync(global::Google.Bigtable.V1.MutateRowRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return MutateRowAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Mutates a row atomically. Cells already present in the row are left
/// unchanged unless explicitly changed by 'mutation'.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Protobuf.WellKnownTypes.Empty> MutateRowAsync(global::Google.Bigtable.V1.MutateRowRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_MutateRow, null, options, request);
}
/// <summary>
/// Mutates multiple rows in a batch. Each individual row is mutated
/// atomically as in MutateRow, but the entire batch is not executed
/// atomically.
/// </summary>
public virtual global::Google.Bigtable.V1.MutateRowsResponse MutateRows(global::Google.Bigtable.V1.MutateRowsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return MutateRows(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Mutates multiple rows in a batch. Each individual row is mutated
/// atomically as in MutateRow, but the entire batch is not executed
/// atomically.
/// </summary>
public virtual global::Google.Bigtable.V1.MutateRowsResponse MutateRows(global::Google.Bigtable.V1.MutateRowsRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_MutateRows, null, options, request);
}
/// <summary>
/// Mutates multiple rows in a batch. Each individual row is mutated
/// atomically as in MutateRow, but the entire batch is not executed
/// atomically.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Bigtable.V1.MutateRowsResponse> MutateRowsAsync(global::Google.Bigtable.V1.MutateRowsRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return MutateRowsAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Mutates multiple rows in a batch. Each individual row is mutated
/// atomically as in MutateRow, but the entire batch is not executed
/// atomically.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Bigtable.V1.MutateRowsResponse> MutateRowsAsync(global::Google.Bigtable.V1.MutateRowsRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_MutateRows, null, options, request);
}
/// <summary>
/// Mutates a row atomically based on the output of a predicate Reader filter.
/// </summary>
public virtual global::Google.Bigtable.V1.CheckAndMutateRowResponse CheckAndMutateRow(global::Google.Bigtable.V1.CheckAndMutateRowRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return CheckAndMutateRow(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Mutates a row atomically based on the output of a predicate Reader filter.
/// </summary>
public virtual global::Google.Bigtable.V1.CheckAndMutateRowResponse CheckAndMutateRow(global::Google.Bigtable.V1.CheckAndMutateRowRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_CheckAndMutateRow, null, options, request);
}
/// <summary>
/// Mutates a row atomically based on the output of a predicate Reader filter.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Bigtable.V1.CheckAndMutateRowResponse> CheckAndMutateRowAsync(global::Google.Bigtable.V1.CheckAndMutateRowRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return CheckAndMutateRowAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Mutates a row atomically based on the output of a predicate Reader filter.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Bigtable.V1.CheckAndMutateRowResponse> CheckAndMutateRowAsync(global::Google.Bigtable.V1.CheckAndMutateRowRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_CheckAndMutateRow, null, options, request);
}
/// <summary>
/// Modifies a row atomically, reading the latest existing timestamp/value from
/// the specified columns and writing a new value at
/// max(existing timestamp, current server time) based on pre-defined
/// read/modify/write rules. Returns the new contents of all modified cells.
/// </summary>
public virtual global::Google.Bigtable.V1.Row ReadModifyWriteRow(global::Google.Bigtable.V1.ReadModifyWriteRowRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return ReadModifyWriteRow(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Modifies a row atomically, reading the latest existing timestamp/value from
/// the specified columns and writing a new value at
/// max(existing timestamp, current server time) based on pre-defined
/// read/modify/write rules. Returns the new contents of all modified cells.
/// </summary>
public virtual global::Google.Bigtable.V1.Row ReadModifyWriteRow(global::Google.Bigtable.V1.ReadModifyWriteRowRequest request, CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_ReadModifyWriteRow, null, options, request);
}
/// <summary>
/// Modifies a row atomically, reading the latest existing timestamp/value from
/// the specified columns and writing a new value at
/// max(existing timestamp, current server time) based on pre-defined
/// read/modify/write rules. Returns the new contents of all modified cells.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Bigtable.V1.Row> ReadModifyWriteRowAsync(global::Google.Bigtable.V1.ReadModifyWriteRowRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
{
return ReadModifyWriteRowAsync(request, new CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Modifies a row atomically, reading the latest existing timestamp/value from
/// the specified columns and writing a new value at
/// max(existing timestamp, current server time) based on pre-defined
/// read/modify/write rules. Returns the new contents of all modified cells.
/// </summary>
public virtual AsyncUnaryCall<global::Google.Bigtable.V1.Row> ReadModifyWriteRowAsync(global::Google.Bigtable.V1.ReadModifyWriteRowRequest request, CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_ReadModifyWriteRow, null, options, request);
}
protected override BigtableServiceClient NewInstance(ClientBaseConfiguration configuration)
{
return new BigtableServiceClient(configuration);
}
}
/// <summary>Creates service definition that can be registered with a server</summary>
public static ServerServiceDefinition BindService(BigtableServiceBase serviceImpl)
{
return ServerServiceDefinition.CreateBuilder()
.AddMethod(__Method_ReadRows, serviceImpl.ReadRows)
.AddMethod(__Method_SampleRowKeys, serviceImpl.SampleRowKeys)
.AddMethod(__Method_MutateRow, serviceImpl.MutateRow)
.AddMethod(__Method_MutateRows, serviceImpl.MutateRows)
.AddMethod(__Method_CheckAndMutateRow, serviceImpl.CheckAndMutateRow)
.AddMethod(__Method_ReadModifyWriteRow, serviceImpl.ReadModifyWriteRow).Build();
}
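// A minimal hosting/consumption sketch, assuming the Grpc.Core API this file is
// generated against; the implementation class, host and port are hypothetical:
//
//     var server = new Server
//     {
//         Services = { BigtableService.BindService(new MyBigtableService()) },
//         Ports = { new ServerPort("localhost", 50051, ServerCredentials.Insecure) }
//     };
//     server.Start();
//
//     var channel = new Channel("localhost:50051", ChannelCredentials.Insecure);
//     var client = new BigtableService.BigtableServiceClient(channel);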
}
}
#endregion
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Runtime.ExceptionServices;
using System.Security.Claims;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Components.Authorization;
using Microsoft.AspNetCore.Components.RenderTree;
using Microsoft.AspNetCore.Components.WebAssembly.Authentication.Internal;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.JSInterop;
using Moq;
using Xunit;
namespace Microsoft.AspNetCore.Components.WebAssembly.Authentication
{
public class RemoteAuthenticatorCoreTests
{
private const string _action = nameof(RemoteAuthenticatorViewCore<RemoteAuthenticationState>.Action);
private const string _onLogInSucceded = nameof(RemoteAuthenticatorViewCore<RemoteAuthenticationState>.OnLogInSucceeded);
private const string _onLogOutSucceeded = nameof(RemoteAuthenticatorViewCore<RemoteAuthenticationState>.OnLogOutSucceeded);
[Fact]
public async Task AuthenticationManager_Throws_ForInvalidAction()
{
// Arrange
var remoteAuthenticator = new RemoteAuthenticatorViewCore<RemoteAuthenticationState>();
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = ""
});
// Act & assert
await Assert.ThrowsAsync<InvalidOperationException>(() => remoteAuthenticator.SetParametersAsync(parameters));
}
[Fact]
public async Task AuthenticationManager_Login_NavigatesToReturnUrlOnSuccess()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/login?returnUrl=https://www.example.com/base/fetchData");
authServiceMock.SignInCallback = _ => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Success,
State = remoteAuthenticator.AuthenticationState
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogIn
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal("https://www.example.com/base/fetchData", jsRuntime.LastInvocation.args[0]);
}
[Fact]
public async Task AuthenticationManager_Login_DoesNothingOnRedirect()
{
// Arrange
var originalUrl = "https://www.example.com/base/authentication/login?returnUrl=https://www.example.com/base/fetchData";
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(originalUrl);
authServiceMock.SignInCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Redirect,
State = remoteAuthenticator.AuthenticationState
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogIn
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(originalUrl, remoteAuthenticator.Navigation.Uri);
}
[Fact]
public async Task AuthenticationManager_Login_NavigatesToLoginFailureOnError()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/login?returnUrl=https://www.example.com/base/fetchData");
authServiceMock.SignInCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Failure,
ErrorMessage = "There was an error trying to log in"
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogIn
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal("https://www.example.com/base/authentication/login-failed", remoteAuthenticator.Navigation.Uri.ToString());
}
[Fact]
public async Task AuthenticationManager_LoginCallback_ThrowsOnRedirectResult()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/login?returnUrl=https://www.example.com/base/fetchData");
authServiceMock.CompleteSignInCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Redirect
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogInCallback
});
await Assert.ThrowsAsync<InvalidOperationException>(
async () => await renderer.Dispatcher.InvokeAsync<object>(async () =>
{
await remoteAuthenticator.SetParametersAsync(parameters);
return null;
}));
}
[Fact]
public async Task AuthenticationManager_LoginCallback_DoesNothingOnOperationCompleted()
{
// Arrange
var originalUrl = "https://www.example.com/base/authentication/login-callback?code=1234";
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
originalUrl);
authServiceMock.CompleteSignInCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.OperationCompleted
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogInCallback
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(originalUrl, remoteAuthenticator.Navigation.Uri);
}
[Fact]
public async Task AuthenticationManager_LoginCallback_NavigatesToReturnUrlFromStateOnSuccess()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/login-callback?code=1234");
var fetchDataUrl = "https://www.example.com/base/fetchData";
remoteAuthenticator.AuthenticationState.ReturnUrl = fetchDataUrl;
authServiceMock.CompleteSignInCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Success,
State = remoteAuthenticator.AuthenticationState
});
var loggingSucceededCalled = false;
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogInCallback,
[_onLogInSucceeded] = new EventCallbackFactory().Create<RemoteAuthenticationState>(
remoteAuthenticator,
(state) => loggingSucceededCalled = true),
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(fetchDataUrl, jsRuntime.LastInvocation.args[0]);
Assert.True(loggingSucceededCalled);
}
[Fact]
public async Task AuthenticationManager_LoginCallback_NavigatesToLoginFailureOnError()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/login-callback?code=1234");
var fetchDataUrl = "https://www.example.com/base/fetchData";
remoteAuthenticator.AuthenticationState.ReturnUrl = fetchDataUrl;
authServiceMock.CompleteSignInCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Failure,
ErrorMessage = "There was an error trying to log in"
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogInCallback
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(
"https://www.example.com/base/authentication/login-failed?message=There was an error trying to log in",
jsRuntime.LastInvocation.args[0]);
}
[Fact]
public async Task AuthenticationManager_Logout_NavigatesToReturnUrlOnSuccess()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/logout?returnUrl=https://www.example.com/base/");
authServiceMock.GetAuthenticatedUserCallback = () => new ValueTask<ClaimsPrincipal>(new ClaimsPrincipal(new ClaimsIdentity("Test")));
authServiceMock.SignOutCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Success,
State = remoteAuthenticator.AuthenticationState
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOut
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal("https://www.example.com/base/", jsRuntime.LastInvocation.args[0]);
}
[Fact]
public async Task AuthenticationManager_Logout_NavigatesToDefaultReturnUrlWhenNoReturnUrlIsPresent()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/logout");
authServiceMock.GetAuthenticatedUserCallback = () => new ValueTask<ClaimsPrincipal>(new ClaimsPrincipal(new ClaimsIdentity("Test")));
authServiceMock.SignOutCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Success,
State = remoteAuthenticator.AuthenticationState
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOut
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal("https://www.example.com/base/authentication/logged-out", jsRuntime.LastInvocation.args[0]);
}
[Fact]
public async Task AuthenticationManager_Logout_DoesNothingOnRedirect()
{
// Arrange
var originalUrl = "https://www.example.com/base/authentication/login?returnUrl=https://www.example.com/base/fetchData";
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(originalUrl);
authServiceMock.GetAuthenticatedUserCallback = () => new ValueTask<ClaimsPrincipal>(new ClaimsPrincipal(new ClaimsIdentity("Test")));
authServiceMock.SignOutCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Redirect,
State = remoteAuthenticator.AuthenticationState
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOut
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(originalUrl, remoteAuthenticator.Navigation.Uri);
}
[Fact]
public async Task AuthenticationManager_Logout_RedirectsToFailureOnInvalidSignOutState()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/logout?returnUrl=https://www.example.com/base/fetchData");
if (remoteAuthenticator.SignOutManager is TestSignOutSessionStateManager testManager)
{
testManager.SignOutState = false;
}
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOut
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(
"https://www.example.com/base/authentication/logout-failed?message=The%20logout%20was%20not%20initiated%20from%20within%20the%20page.",
remoteAuthenticator.Navigation.Uri);
}
[Fact]
public async Task AuthenticationManager_Logout_NavigatesToLogoutFailureOnError()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/logout?returnUrl=https://www.example.com/base/fetchData");
authServiceMock.GetAuthenticatedUserCallback = () => new ValueTask<ClaimsPrincipal>(new ClaimsPrincipal(new ClaimsIdentity("Test")));
authServiceMock.SignOutCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Failure,
ErrorMessage = "There was an error trying to log out"
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOut
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal("https://www.example.com/base/authentication/logout-failed", remoteAuthenticator.Navigation.Uri.ToString());
}
[Fact]
public async Task AuthenticationManager_LogoutCallback_ThrowsOnRedirectResult()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/logout-callback?returnUrl=https://www.example.com/base/fetchData");
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOutCallback
});
authServiceMock.CompleteSignOutCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Redirect,
});
await Assert.ThrowsAsync<InvalidOperationException>(
async () => await renderer.Dispatcher.InvokeAsync<object>(async () =>
{
await remoteAuthenticator.SetParametersAsync(parameters);
return null;
}));
}
[Fact]
public async Task AuthenticationManager_LogoutCallback_DoesNothingOnOperationCompleted()
{
// Arrange
var originalUrl = "https://www.example.com/base/authentication/logout-callback?code=1234";
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
originalUrl);
authServiceMock.CompleteSignOutCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.OperationCompleted
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOutCallback
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(originalUrl, remoteAuthenticator.Navigation.Uri);
}
[Fact]
public async Task AuthenticationManager_LogoutCallback_NavigatesToReturnUrlFromStateOnSuccess()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/logout-callback-callback?code=1234");
var fetchDataUrl = "https://www.example.com/base/fetchData";
remoteAuthenticator.AuthenticationState.ReturnUrl = fetchDataUrl;
authServiceMock.CompleteSignOutCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Success,
State = remoteAuthenticator.AuthenticationState
});
var loggingOutSucceededCalled = false;
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOutCallback,
[_onLogOutSucceeded] = new EventCallbackFactory().Create<RemoteAuthenticationState>(
remoteAuthenticator,
(state) => loggingOutSucceededCalled = true),
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(fetchDataUrl, jsRuntime.LastInvocation.args[0]);
Assert.True(loggingOutSucceededCalled);
}
[Fact]
public async Task AuthenticationManager_LogoutCallback_NavigatesToLoginFailureOnError()
{
// Arrange
var (remoteAuthenticator, renderer, authServiceMock, jsRuntime) = CreateAuthenticationManager(
"https://www.example.com/base/authentication/logout-callback?code=1234");
var fetchDataUrl = "https://www.example.com/base/fetchData";
remoteAuthenticator.AuthenticationState.ReturnUrl = fetchDataUrl;
authServiceMock.CompleteSignOutCallback = s => Task.FromResult(new RemoteAuthenticationResult<RemoteAuthenticationState>()
{
Status = RemoteAuthenticationStatus.Failure,
ErrorMessage = "There was an error trying to log out"
});
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = RemoteAuthenticationActions.LogOutCallback
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => remoteAuthenticator.SetParametersAsync(parameters));
// Assert
Assert.Equal(
"https://www.example.com/base/authentication/logout-failed?message=There was an error trying to log out",
jsRuntime.LastInvocation.args[0]);
}
public static TheoryData<UIValidator> DisplaysRightUIData { get; } = new TheoryData<UIValidator>
{
{ new UIValidator {
Action = "login", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.LoggingIn = validator.FakeRender; } }
},
{ new UIValidator {
Action = "login-callback", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.CompletingLoggingIn = validator.FakeRender; } }
},
{ new UIValidator {
Action = "login-failed", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.LogInFailed = m => builder => validator.FakeRender(builder); } }
},
{ new UIValidator {
Action = "profile", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.LoggingIn = validator.FakeRender; } }
},
// Profile fragment overrides
{ new UIValidator {
Action = "profile", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.UserProfile = validator.FakeRender; } }
},
{ new UIValidator {
Action = "register", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.LoggingIn = validator.FakeRender; } }
},
// Register fragment overrides
{ new UIValidator {
Action = "register", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.Registering = validator.FakeRender; } }
},
{ new UIValidator {
Action = "logout", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.LogOut = validator.FakeRender; } }
},
{ new UIValidator {
Action = "logout-callback", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.CompletingLogOut = validator.FakeRender; } }
},
{ new UIValidator {
Action = "logout-failed", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.LogOutFailed = m => builder => validator.FakeRender(builder); } }
},
{ new UIValidator {
Action = "logged-out", SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.LogOutSucceeded = validator.FakeRender; } }
},
};
[Theory]
[MemberData(nameof(DisplaysRightUIData))]
public async Task AuthenticationManager_DisplaysRightUI_ForEachStateAsync(UIValidator validator)
{
// Arrange
var renderer = new TestRenderer(new ServiceCollection().BuildServiceProvider());
var authenticator = new TestRemoteAuthenticatorView();
renderer.Attach(authenticator);
validator.SetupFakeRender(authenticator);
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = validator.Action
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => authenticator.SetParametersAsync(parameters));
// Assert
Assert.True(validator.WasCalled);
}
[Theory]
[MemberData(nameof(DisplaysRightUIData))]
public async Task AuthenticationManager_DoesNotThrowExceptionOnDisplaysUI_WhenPathsAreMissing(UIValidator validator)
{
// Arrange
var renderer = new TestRenderer(new ServiceCollection().BuildServiceProvider());
var authenticator = new TestRemoteAuthenticatorView(new RemoteAuthenticationApplicationPathsOptions());
renderer.Attach(authenticator);
validator.SetupFakeRender(authenticator);
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = validator.Action
});
// Act
Task result = await renderer.Dispatcher.InvokeAsync<Task>(() => authenticator.SetParametersAsync(parameters));
// Assert
Assert.Null(result.Exception);
}
public static TheoryData<UIValidator, string> DisplaysRightUIWhenPathsAreMissingData { get; } = new TheoryData<UIValidator, string>
{
// Profile fragment overrides
{
new UIValidator {
Action = "profile",
SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.UserProfile = validator.FakeRender; },
RetrieveOriginalRenderAction = (validator, remoteAuthenticator) => { validator.OriginalRender = remoteAuthenticator.UserProfile; } },
"ProfileNotSupportedFragment"
},
{
new UIValidator {
Action = "register",
SetupFakeRenderAction = (validator, remoteAuthenticator) => { remoteAuthenticator.Registering = validator.FakeRender; },
RetrieveOriginalRenderAction = (validator, remoteAuthenticator) => { validator.OriginalRender = remoteAuthenticator.Registering; } },
"RegisterNotSupportedFragment"
}
};
[Theory]
[MemberData(nameof(DisplaysRightUIWhenPathsAreMissingData))]
public async Task AuthenticationManager_DisplaysRightUI_WhenPathsAreMissing(UIValidator validator, string methodName)
{
// Arrange
var renderer = new TestRenderer(new ServiceCollection().BuildServiceProvider());
var jsRuntime = new TestJsRuntime();
var authenticator = new TestRemoteAuthenticatorView(new RemoteAuthenticationApplicationPathsOptions(), jsRuntime);
renderer.Attach(authenticator);
var parameters = ParameterView.FromDictionary(new Dictionary<string, object>
{
[_action] = validator.Action
});
// Act
await renderer.Dispatcher.InvokeAsync<object>(() => authenticator.SetParametersAsync(parameters));
validator.RetrieveOriginalRender(authenticator);
validator.SetupFakeRender(authenticator);
Task result = await renderer.Dispatcher.InvokeAsync<Task>(() => authenticator.SetParametersAsync(parameters));
// Assert
Assert.True(validator.WasCalled);
Assert.Equal(methodName, validator.OriginalRender.Method.Name);
Assert.Equal(default, jsRuntime.LastInvocation);
}
public class UIValidator
{
public string Action { get; set; }
public Action<UIValidator, RemoteAuthenticatorViewCore<RemoteAuthenticationState>> SetupFakeRenderAction { get; set; }
public Action<UIValidator, RemoteAuthenticatorViewCore<RemoteAuthenticationState>> RetrieveOriginalRenderAction { get; set; }
public bool WasCalled { get; set; }
public RenderFragment OriginalRender { get; set; }
public RenderFragment FakeRender { get; set; }
public UIValidator() => FakeRender = builder => WasCalled = true;
internal void SetupFakeRender(TestRemoteAuthenticatorView manager) => SetupFakeRenderAction(this, manager);
internal void RetrieveOriginalRender(TestRemoteAuthenticatorView manager) => RetrieveOriginalRenderAction(this, manager);
}
private static
(RemoteAuthenticatorViewCore<RemoteAuthenticationState> manager,
TestRenderer renderer,
TestRemoteAuthenticationService authenticationServiceMock,
TestJsRuntime js)
CreateAuthenticationManager(
string currentUri,
string baseUri = "https://www.example.com/base/")
{
var renderer = new TestRenderer(new ServiceCollection().BuildServiceProvider());
var remoteAuthenticator = new RemoteAuthenticatorViewCore<RemoteAuthenticationState>();
renderer.Attach(remoteAuthenticator);
var navigationManager = new TestNavigationManager(
baseUri,
currentUri);
remoteAuthenticator.Navigation = navigationManager;
remoteAuthenticator.AuthenticationState = new RemoteAuthenticationState();
remoteAuthenticator.ApplicationPaths = new RemoteAuthenticationApplicationPathsOptions();
var jsRuntime = new TestJsRuntime();
var authenticationServiceMock = new TestRemoteAuthenticationService(
jsRuntime,
Mock.Of<IOptionsSnapshot<RemoteAuthenticationOptions<OidcProviderOptions>>>(),
navigationManager);
remoteAuthenticator.SignOutManager = new TestSignOutSessionStateManager();
remoteAuthenticator.AuthenticationService = authenticationServiceMock;
remoteAuthenticator.AuthenticationProvider = authenticationServiceMock;
remoteAuthenticator.JS = jsRuntime;
return (remoteAuthenticator, renderer, authenticationServiceMock, jsRuntime);
}
private class TestNavigationManager : NavigationManager
{
public TestNavigationManager(string baseUrl, string currentUrl) => Initialize(baseUrl, currentUrl);
protected override void NavigateToCore(string uri, bool forceLoad)
=> Uri = System.Uri.IsWellFormedUriString(uri, UriKind.Absolute) ? uri : new Uri(new Uri(BaseUri), uri).ToString();
}
private class TestSignOutSessionStateManager : SignOutSessionStateManager
{
public TestSignOutSessionStateManager() : base(null)
{
}
public bool SignOutState { get; set; } = true;
public override ValueTask SetSignOutState()
{
SignOutState = true;
return default;
}
public override Task<bool> ValidateSignOutState() => Task.FromResult(SignOutState);
}
private class TestJsRuntime : IJSRuntime
{
public (string identifier, object[] args) LastInvocation { get; set; }
public ValueTask<TValue> InvokeAsync<TValue>(string identifier, object[] args)
{
LastInvocation = (identifier, args);
return default;
}
public ValueTask<TValue> InvokeAsync<TValue>(string identifier, CancellationToken cancellationToken, object[] args)
{
LastInvocation = (identifier, args);
return default;
}
}
public class TestRemoteAuthenticatorView : RemoteAuthenticatorViewCore<RemoteAuthenticationState>
{
public TestRemoteAuthenticatorView()
{
ApplicationPaths = new RemoteAuthenticationApplicationPathsOptions()
{
RemoteProfilePath = "Identity/Account/Manage",
RemoteRegisterPath = "Identity/Account/Register",
};
}
public TestRemoteAuthenticatorView(RemoteAuthenticationApplicationPathsOptions applicationPaths, IJSRuntime jsRuntime = default)
{
ApplicationPaths = applicationPaths;
JS = jsRuntime;
}
protected override Task OnParametersSetAsync()
{
if (Action == "register" || Action == "profile")
{
return base.OnParametersSetAsync();
}
return Task.CompletedTask;
}
}
private class TestRemoteAuthenticationService : RemoteAuthenticationService<RemoteAuthenticationState, RemoteUserAccount, OidcProviderOptions>
{
public TestRemoteAuthenticationService(
IJSRuntime jsRuntime,
IOptionsSnapshot<RemoteAuthenticationOptions<OidcProviderOptions>> options,
TestNavigationManager navigationManager) :
base(jsRuntime, options, navigationManager, new AccountClaimsPrincipalFactory<RemoteUserAccount>(Mock.Of<IAccessTokenProviderAccessor>()))
{
}
public Func<RemoteAuthenticationContext<RemoteAuthenticationState>, Task<RemoteAuthenticationResult<RemoteAuthenticationState>>> SignInCallback { get; set; }
public Func<RemoteAuthenticationContext<RemoteAuthenticationState>, Task<RemoteAuthenticationResult<RemoteAuthenticationState>>> CompleteSignInCallback { get; set; }
public Func<RemoteAuthenticationContext<RemoteAuthenticationState>, Task<RemoteAuthenticationResult<RemoteAuthenticationState>>> SignOutCallback { get; set; }
public Func<RemoteAuthenticationContext<RemoteAuthenticationState>, Task<RemoteAuthenticationResult<RemoteAuthenticationState>>> CompleteSignOutCallback { get; set; }
public Func<ValueTask<ClaimsPrincipal>> GetAuthenticatedUserCallback { get; set; }
public override async Task<AuthenticationState> GetAuthenticationStateAsync() => new AuthenticationState(await GetAuthenticatedUserCallback());
public override Task<RemoteAuthenticationResult<RemoteAuthenticationState>> CompleteSignInAsync(RemoteAuthenticationContext<RemoteAuthenticationState> context) => CompleteSignInCallback(context);
protected internal override ValueTask<ClaimsPrincipal> GetAuthenticatedUser() => GetAuthenticatedUserCallback();
public override Task<RemoteAuthenticationResult<RemoteAuthenticationState>> CompleteSignOutAsync(RemoteAuthenticationContext<RemoteAuthenticationState> context) => CompleteSignOutCallback(context);
public override Task<RemoteAuthenticationResult<RemoteAuthenticationState>> SignInAsync(RemoteAuthenticationContext<RemoteAuthenticationState> context) => SignInCallback(context);
public override Task<RemoteAuthenticationResult<RemoteAuthenticationState>> SignOutAsync(RemoteAuthenticationContext<RemoteAuthenticationState> context) => SignOutCallback(context);
}
private class TestRenderer : Renderer
{
public TestRenderer(IServiceProvider services)
: base(services, NullLoggerFactory.Instance)
{
}
public int Attach(IComponent component) => AssignRootComponentId(component);
private static readonly Dispatcher _dispatcher = Dispatcher.CreateDefault();
public override Dispatcher Dispatcher => _dispatcher;
protected override void HandleException(Exception exception)
=> ExceptionDispatchInfo.Capture(exception).Throw();
protected override Task UpdateDisplayAsync(in RenderBatch renderBatch) =>
Task.CompletedTask;
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.CompilerServices;
using Roslyn.Utilities;
using Ref = System.Reflection;
namespace Microsoft.CodeAnalysis.Scripting
{
public abstract partial class ObjectFormatter
{
// internal for testing
internal sealed class Formatter
{
private readonly ObjectFormatter _language;
private readonly ObjectFormattingOptions _options;
private HashSet<object> _lazyVisitedObjects;
private HashSet<object> VisitedObjects
{
get
{
if (_lazyVisitedObjects == null)
{
_lazyVisitedObjects = new HashSet<object>(ReferenceEqualityComparer.Instance);
}
return _lazyVisitedObjects;
}
}
public Formatter(ObjectFormatter language, ObjectFormattingOptions options)
{
_options = options ?? ObjectFormattingOptions.Default;
_language = language;
}
private Builder MakeMemberBuilder(int limit)
{
return new Builder(Math.Min(_options.MaxLineLength, limit), _options, insertEllipsis: false);
}
public string FormatObject(object obj)
{
try
{
var builder = new Builder(_options.MaxOutputLength, _options, insertEllipsis: true);
string _;
return FormatObjectRecursive(builder, obj, _options.QuoteStrings, _options.MemberFormat, out _).ToString();
}
catch (InsufficientExecutionStackException)
{
return ScriptingResources.StackOverflowWhileEvaluating;
}
}
private Builder FormatObjectRecursive(Builder result, object obj, bool quoteStrings, MemberDisplayFormat memberFormat, out string name)
{
name = null;
string primitive = _language.FormatPrimitive(obj, quoteStrings, _options.IncludeCodePoints, _options.UseHexadecimalNumbers);
if (primitive != null)
{
result.Append(primitive);
return result;
}
object originalObj = obj;
Type originalType = originalObj.GetType();
//
// Override KeyValuePair<,>.ToString() to get better dictionary elements formatting:
//
// { { format(key), format(value) }, ... }
// instead of
// { [key.ToString(), value.ToString()], ... }
//
// This is more general than overriding Dictionary<,> debugger proxy attribute since it applies on all
// types that return an array of KeyValuePair in their DebuggerDisplay to display items.
//
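// For example (illustrative, assuming default options), a dictionary entry holding
// KeyValuePair<int, string>(1, "one") renders as { 1, "one" } rather than [1, one].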
if (originalType.IsGenericType && originalType.GetGenericTypeDefinition() == typeof(KeyValuePair<,>))
{
if (memberFormat != MemberDisplayFormat.InlineValue)
{
result.Append(_language.FormatTypeName(originalType, _options));
result.Append(' ');
}
FormatKeyValuePair(result, originalObj);
return result;
}
if (originalType.IsArray)
{
if (!VisitedObjects.Add(originalObj))
{
result.AppendInfiniteRecursionMarker();
return result;
}
FormatArray(result, (Array)originalObj, inline: memberFormat != MemberDisplayFormat.List);
VisitedObjects.Remove(originalObj);
return result;
}
DebuggerDisplayAttribute debuggerDisplay = GetApplicableDebuggerDisplayAttribute(originalType);
if (debuggerDisplay != null)
{
name = debuggerDisplay.Name;
}
bool suppressMembers = false;
//
// TypeName(count) for ICollection implementers
// or
// TypeName([[DebuggerDisplay.Value]]) // Inline
// [[DebuggerDisplay.Value]] // InlineValue
// or
// [[ToString()]] if ToString overridden
// or
// TypeName
//
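// Illustrative examples of the headers above (assuming default options; Point is a hypothetical type):
//   new List<int> { 1, 2, 3 }                      -> List<int>(3)
//   Point marked [DebuggerDisplay("{X}, {Y}")]     -> Point(1, 2)  (Inline)  or  1, 2  (InlineValue)
//   a type whose ToString() returns "custom"       -> [custom]
//   any other type                                 -> its type name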
ICollection collection;
if ((collection = originalObj as ICollection) != null)
{
FormatCollectionHeader(result, collection);
}
else if (debuggerDisplay != null && !String.IsNullOrEmpty(debuggerDisplay.Value))
{
if (memberFormat != MemberDisplayFormat.InlineValue)
{
result.Append(_language.FormatTypeName(originalType, _options));
result.Append('(');
}
FormatWithEmbeddedExpressions(result, debuggerDisplay.Value, originalObj);
if (memberFormat != MemberDisplayFormat.InlineValue)
{
result.Append(')');
}
suppressMembers = true;
}
else if (HasOverriddenToString(originalType))
{
ObjectToString(result, originalObj);
suppressMembers = true;
}
else
{
result.Append(_language.FormatTypeName(originalType, _options));
}
if (memberFormat == MemberDisplayFormat.NoMembers)
{
return result;
}
bool includeNonPublic = memberFormat == MemberDisplayFormat.List;
object proxy = GetDebuggerTypeProxy(obj);
if (proxy != null)
{
obj = proxy;
includeNonPublic = false;
suppressMembers = false;
}
if (memberFormat != MemberDisplayFormat.List && suppressMembers)
{
return result;
}
// TODO (tomat): we should not use recursion
RuntimeHelpers.EnsureSufficientExecutionStack();
result.Append(' ');
if (!VisitedObjects.Add(originalObj))
{
result.AppendInfiniteRecursionMarker();
return result;
}
// handle special types only if a proxy isn't defined
if (proxy == null)
{
IDictionary dictionary;
if ((dictionary = obj as IDictionary) != null)
{
FormatDictionary(result, dictionary, inline: memberFormat != MemberDisplayFormat.List);
return result;
}
IEnumerable enumerable;
if ((enumerable = obj as IEnumerable) != null)
{
FormatSequence(result, enumerable, inline: memberFormat != MemberDisplayFormat.List);
return result;
}
}
FormatObjectMembers(result, obj, originalType, includeNonPublic, inline: memberFormat != MemberDisplayFormat.List);
VisitedObjects.Remove(obj);
return result;
}
#region Members
/// <summary>
/// Formats object members to a list.
///
/// Inline == true:
/// <code>
/// { A=true, B=false, C=new int[3] { 1, 2, 3 } }
/// </code>
///
/// Inline == false:
/// <code>
/// {
/// A: true,
/// B: false,
/// C: new int[3] { 1, 2, 3 }
/// }
/// </code>
/// </summary>
private void FormatObjectMembers(Builder result, object obj, Type originalType, bool includeNonPublic, bool inline)
{
int lengthLimit = result.Remaining;
if (lengthLimit < 0)
{
return;
}
var members = new List<FormattedMember>();
// Limits the number of members added to the result. A few more members may be added than will
// ultimately fit in the result; the excess is thrown away later, but not many more than needed.
FormatObjectMembersRecursive(members, obj, includeNonPublic, ref lengthLimit);
bool useCollectionFormat = UseCollectionFormat(members, originalType);
result.AppendGroupOpening();
for (int i = 0; i < members.Count; i++)
{
result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
if (useCollectionFormat)
{
members[i].AppendAsCollectionEntry(result);
}
else
{
members[i].Append(result, inline ? "=" : ": ");
}
if (result.LimitReached)
{
break;
}
}
result.AppendGroupClosing(inline);
}
private static bool UseCollectionFormat(IEnumerable<FormattedMember> members, Type originalType)
{
return typeof(IEnumerable).IsAssignableFrom(originalType) && members.All(member => member.Index >= 0);
}
private struct FormattedMember
{
// Non-negative if the member is an inlined element of an array (DebuggerBrowsableState.RootHidden applied on a member of array type).
public readonly int Index;
// Formatted name of the member or null if it doesn't have a name (Index is >=0 then).
public readonly string Name;
// Formatted value of the member.
public readonly string Value;
public FormattedMember(int index, string name, string value)
{
Name = name;
Index = index;
Value = value;
}
public int MinimalLength
{
get { return (Name != null ? Name.Length : "[0]".Length) + Value.Length; }
}
public string GetDisplayName()
{
return Name ?? "[" + Index.ToString() + "]";
}
public bool HasKeyName()
{
return Index >= 0 && Name != null && Name.Length >= 2 && Name[0] == '[' && Name[Name.Length - 1] == ']';
}
public bool AppendAsCollectionEntry(Builder result)
{
// Some BCL collections use [{key.ToString()}]: {value.ToString()} pattern to display collection entries.
// We want them to be printed initializer-style, i.e. { <key>, <value> }
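// For example (illustrative), an entry with Name = "[4]" and Value = "\"x\"" renders as
// { 4, "x" } rather than [4]: "x".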
if (HasKeyName())
{
result.AppendGroupOpening();
result.AppendCollectionItemSeparator(isFirst: true, inline: true);
result.Append(Name, 1, Name.Length - 2);
result.AppendCollectionItemSeparator(isFirst: false, inline: true);
result.Append(Value);
result.AppendGroupClosing(inline: true);
}
else
{
result.Append(Value);
}
return true;
}
public bool Append(Builder result, string separator)
{
result.Append(GetDisplayName());
result.Append(separator);
result.Append(Value);
return true;
}
}
/// <summary>
/// Enumerates sorted object members to display.
/// </summary>
private void FormatObjectMembersRecursive(List<FormattedMember> result, object obj, bool includeNonPublic, ref int lengthLimit)
{
Debug.Assert(obj != null);
var type = obj.GetType();
var fields = type.GetFields(Ref.BindingFlags.Instance | Ref.BindingFlags.Public | Ref.BindingFlags.NonPublic);
var properties = type.GetProperties(Ref.BindingFlags.Instance | Ref.BindingFlags.Public | Ref.BindingFlags.NonPublic);
var members = new List<Ref.MemberInfo>(fields.Length + properties.Length);
members.AddRange(fields);
members.AddRange(properties);
// Sort case-insensitively first and break ties with a case-sensitive comparison so that the
// order of members is always well-defined (members can differ by case only) and we don't
// depend on reflection order. TODO (tomat): sort by visibility.
members.Sort(new Comparison<Ref.MemberInfo>((x, y) =>
{
int comparisonResult = StringComparer.OrdinalIgnoreCase.Compare(x.Name, y.Name);
if (comparisonResult == 0)
{
comparisonResult = StringComparer.Ordinal.Compare(x.Name, y.Name);
}
return comparisonResult;
}));
foreach (var member in members)
{
if (_language.IsHiddenMember(member))
{
continue;
}
bool rootHidden = false, ignoreVisibility = false;
var browsable = (DebuggerBrowsableAttribute)member.GetCustomAttributes(typeof(DebuggerBrowsableAttribute), false).FirstOrDefault();
if (browsable != null)
{
if (browsable.State == DebuggerBrowsableState.Never)
{
continue;
}
ignoreVisibility = true;
rootHidden = browsable.State == DebuggerBrowsableState.RootHidden;
}
Ref.FieldInfo field = member as Ref.FieldInfo;
if (field != null)
{
if (!(includeNonPublic || ignoreVisibility || field.IsPublic || field.IsFamily || field.IsFamilyOrAssembly))
{
continue;
}
}
else
{
Ref.PropertyInfo property = (Ref.PropertyInfo)member;
var getter = property.GetGetMethod(nonPublic: true);
var setter = property.GetSetMethod(nonPublic: true);
if (!(includeNonPublic || ignoreVisibility ||
getter != null && (getter.IsPublic || getter.IsFamily || getter.IsFamilyOrAssembly) ||
setter != null && (setter.IsPublic || setter.IsFamily || setter.IsFamilyOrAssembly)))
{
continue;
}
if (getter == null || getter.GetParameters().Length > 0)
{
continue;
}
}
var debuggerDisplay = GetApplicableDebuggerDisplayAttribute(member);
if (debuggerDisplay != null)
{
string k = FormatWithEmbeddedExpressions(lengthLimit, debuggerDisplay.Name, obj) ?? _language.FormatMemberName(member);
string v = FormatWithEmbeddedExpressions(lengthLimit, debuggerDisplay.Value, obj) ?? String.Empty; // TODO: ?
if (!AddMember(result, new FormattedMember(-1, k, v), ref lengthLimit))
{
return;
}
continue;
}
Exception exception;
object value = GetMemberValue(member, obj, out exception);
if (exception != null)
{
var memberValueBuilder = MakeMemberBuilder(lengthLimit);
FormatException(memberValueBuilder, exception);
if (!AddMember(result, new FormattedMember(-1, _language.FormatMemberName(member), memberValueBuilder.ToString()), ref lengthLimit))
{
return;
}
continue;
}
if (rootHidden)
{
if (value != null && !VisitedObjects.Contains(value))
{
Array array;
if ((array = value as Array) != null) // TODO (tomat): n-dim arrays
{
int i = 0;
foreach (object item in array)
{
string name;
Builder valueBuilder = MakeMemberBuilder(lengthLimit);
FormatObjectRecursive(valueBuilder, item, _options.QuoteStrings, MemberDisplayFormat.InlineValue, out name);
if (!String.IsNullOrEmpty(name))
{
name = FormatWithEmbeddedExpressions(MakeMemberBuilder(lengthLimit), name, item).ToString();
}
if (!AddMember(result, new FormattedMember(i, name, valueBuilder.ToString()), ref lengthLimit))
{
return;
}
i++;
}
}
else if (_language.FormatPrimitive(value, _options.QuoteStrings, _options.IncludeCodePoints, _options.UseHexadecimalNumbers) == null && VisitedObjects.Add(value))
{
FormatObjectMembersRecursive(result, value, includeNonPublic, ref lengthLimit);
VisitedObjects.Remove(value);
}
}
}
else
{
string name;
Builder valueBuilder = MakeMemberBuilder(lengthLimit);
FormatObjectRecursive(valueBuilder, value, _options.QuoteStrings, MemberDisplayFormat.InlineValue, out name);
if (String.IsNullOrEmpty(name))
{
name = _language.FormatMemberName(member);
}
else
{
name = FormatWithEmbeddedExpressions(MakeMemberBuilder(lengthLimit), name, value).ToString();
}
if (!AddMember(result, new FormattedMember(-1, name, valueBuilder.ToString()), ref lengthLimit))
{
return;
}
}
}
}
private bool AddMember(List<FormattedMember> members, FormattedMember member, ref int remainingLength)
{
// Add this item even if we exceed the limit - its prefix might be appended to the result.
members.Add(member);
// We don't need to calculate an exact length, just a lower bound on the size.
// It's fine to add more members to the result than will eventually fit; we just must not add fewer.
// Add a couple more, even if only one (or part of one) fits, so that the separator is included in edge cases.
if (remainingLength == Int32.MinValue)
{
return false;
}
remainingLength -= member.MinimalLength;
if (remainingLength <= 0)
{
remainingLength = Int32.MinValue;
}
return true;
}
private void FormatException(Builder result, Exception exception)
{
result.Append("!<");
result.Append(_language.FormatTypeName(exception.GetType(), _options));
result.Append('>');
}
#endregion
#region Collections
private void FormatKeyValuePair(Builder result, object obj)
{
Type type = obj.GetType();
object key = type.GetProperty("Key").GetValue(obj, SpecializedCollections.EmptyObjects);
object value = type.GetProperty("Value").GetValue(obj, SpecializedCollections.EmptyObjects);
string _;
result.AppendGroupOpening();
result.AppendCollectionItemSeparator(isFirst: true, inline: true);
FormatObjectRecursive(result, key, quoteStrings: true, memberFormat: MemberDisplayFormat.InlineValue, name: out _);
result.AppendCollectionItemSeparator(isFirst: false, inline: true);
FormatObjectRecursive(result, value, quoteStrings: true, memberFormat: MemberDisplayFormat.InlineValue, name: out _);
result.AppendGroupClosing(inline: true);
}
private void FormatCollectionHeader(Builder result, ICollection collection)
{
Array array = collection as Array;
if (array != null)
{
result.Append(_language.FormatArrayTypeName(array, _options));
return;
}
result.Append(_language.FormatTypeName(collection.GetType(), _options));
try
{
result.Append('(');
result.Append(collection.Count.ToString());
result.Append(')');
}
catch (Exception)
{
// skip
}
}
private void FormatArray(Builder result, Array array, bool inline)
{
FormatCollectionHeader(result, array);
if (array.Rank > 1)
{
FormatMultidimensionalArray(result, array, inline);
}
else
{
result.Append(' ');
FormatSequence(result, (IEnumerable)array, inline);
}
}
private void FormatDictionary(Builder result, IDictionary dict, bool inline)
{
result.AppendGroupOpening();
int i = 0;
try
{
IDictionaryEnumerator enumerator = dict.GetEnumerator();
IDisposable disposable = enumerator as IDisposable;
try
{
while (enumerator.MoveNext())
{
var entry = enumerator.Entry;
string _;
result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
result.AppendGroupOpening();
result.AppendCollectionItemSeparator(isFirst: true, inline: true);
FormatObjectRecursive(result, entry.Key, quoteStrings: true, memberFormat: MemberDisplayFormat.InlineValue, name: out _);
result.AppendCollectionItemSeparator(isFirst: false, inline: true);
FormatObjectRecursive(result, entry.Value, quoteStrings: true, memberFormat: MemberDisplayFormat.InlineValue, name: out _);
result.AppendGroupClosing(inline: true);
i++;
}
}
finally
{
if (disposable != null)
{
disposable.Dispose();
}
}
}
catch (Exception e)
{
result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
FormatException(result, e);
result.Append(' ');
result.Append(_options.Ellipsis);
}
result.AppendGroupClosing(inline);
}
private void FormatSequence(Builder result, IEnumerable sequence, bool inline)
{
result.AppendGroupOpening();
int i = 0;
try
{
foreach (var item in sequence)
{
string name;
result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
FormatObjectRecursive(result, item, quoteStrings: true, memberFormat: MemberDisplayFormat.InlineValue, name: out name);
i++;
}
}
catch (Exception e)
{
result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
FormatException(result, e);
result.Append(" ...");
}
result.AppendGroupClosing(inline);
}
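// Illustrative example (assuming default options): for new int[2, 2] { { 1, 2 }, { 3, 4 } }
// the array header "int[2, 2]" is written by FormatCollectionHeader (see FormatArray above),
// and the body below renders as { { 1, 2 }, { 3, 4 } }, one nested group per dimension.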
private void FormatMultidimensionalArray(Builder result, Array array, bool inline)
{
Debug.Assert(array.Rank > 1);
if (array.Length == 0)
{
result.AppendCollectionItemSeparator(isFirst: true, inline: true);
result.AppendGroupOpening();
result.AppendGroupClosing(inline: true);
return;
}
int[] indices = new int[array.Rank];
for (int i = array.Rank - 1; i >= 0; i--)
{
indices[i] = array.GetLowerBound(i);
}
int nesting = 0;
int flatIndex = 0;
while (true)
{
// increment indices (lower index overflows to higher):
int i = indices.Length - 1;
while (indices[i] > array.GetUpperBound(i))
{
indices[i] = array.GetLowerBound(i);
result.AppendGroupClosing(inline: inline || nesting != 1);
nesting--;
i--;
if (i < 0)
{
return;
}
indices[i]++;
}
result.AppendCollectionItemSeparator(isFirst: flatIndex == 0, inline: inline || nesting != 1);
i = indices.Length - 1;
while (i >= 0 && indices[i] == array.GetLowerBound(i))
{
result.AppendGroupOpening();
nesting++;
// array isn't empty, so there is always an element following this separator
result.AppendCollectionItemSeparator(isFirst: true, inline: inline || nesting != 1);
i--;
}
string name;
FormatObjectRecursive(result, array.GetValue(indices), quoteStrings: true, memberFormat: MemberDisplayFormat.InlineValue, name: out name);
indices[indices.Length - 1]++;
flatIndex++;
}
}
#endregion
#region Scalars
private void ObjectToString(Builder result, object obj)
{
try
{
string str = obj.ToString();
result.Append('[');
result.Append(str);
result.Append(']');
}
catch (Exception e)
{
FormatException(result, e);
}
}
#endregion
#region DebuggerDisplay Embedded Expressions
/// <summary>
/// Evaluate a format string with possible member references enclosed in braces.
/// E.g. "foo = {GetFooString(),nq}, bar = {Bar}".
/// </summary>
/// <remarks>
/// Although in theory any expression is allowed to be embedded in the string such behavior is in practice fundamentally broken.
/// The attribute doesn't specify what language (VB, C#, F#, etc.) to use to parse these expressions. Even if it did all languages
/// would need to be able to evaluate each other language's expressions, which is not viable and the Expression Evaluator doesn't
/// work that way today. Instead it evaluates the embedded expressions in the language of the current method frame. When consuming
/// VB objects from C#, for example, the evaluation might fail due to language mismatch (evaluating VB expression using C# parser).
///
/// Therefore we limit the expressions to a simple language independent syntax: {clr-member-name} '(' ')' ',nq',
/// where parentheses and ,nq suffix (no-quotes) are optional and the name is an arbitrary CLR field, property, or method name.
/// We then resolve the member by name using case-sensitive lookup first with fallback to case insensitive and evaluate it.
/// If parentheses are present we only look for methods.
/// Only parameterless members are considered.
/// </remarks>
private string FormatWithEmbeddedExpressions(int lengthLimit, string format, object obj)
{
if (String.IsNullOrEmpty(format))
{
return null;
}
return FormatWithEmbeddedExpressions(new Builder(lengthLimit, _options, insertEllipsis: false), format, obj).ToString();
}
private Builder FormatWithEmbeddedExpressions(Builder result, string format, object obj)
{
int i = 0;
while (i < format.Length)
{
char c = format[i++];
if (c == '{')
{
if (i >= 2 && format[i - 2] == '\\')
{
result.Append('{');
}
else
{
int expressionEnd = format.IndexOf('}', i);
bool noQuotes, callableOnly;
string memberName;
if (expressionEnd == -1 || (memberName = ParseSimpleMemberName(format, i, expressionEnd, out noQuotes, out callableOnly)) == null)
{
// the expression isn't properly formatted
result.Append(format, i - 1, format.Length - i + 1);
break;
}
Ref.MemberInfo member = ResolveMember(obj, memberName, callableOnly);
if (member == null)
{
result.AppendFormat(callableOnly ? "!<Method '{0}' not found>" : "!<Member '{0}' not found>", memberName);
}
else
{
Exception exception;
object value = GetMemberValue(member, obj, out exception);
if (exception != null)
{
FormatException(result, exception);
}
else
{
string name;
FormatObjectRecursive(result, value, !noQuotes, MemberDisplayFormat.NoMembers, out name);
}
}
i = expressionEnd + 1;
}
}
else
{
result.Append(c);
}
}
return result;
}
// Parses
// <clr-member-name>
// <clr-member-name> ',' 'nq'
// <clr-member-name> '(' ')'
// <clr-member-name> '(' ')' ',' 'nq'
//
// Internal for testing purposes.
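// Illustrative examples (start/end are the indices of the expression between the braces):
//   ParseSimpleMemberName("{Bar}", start: 1, end: 4, out noQuotes, out isCallable)
//       returns "Bar";    noQuotes == false, isCallable == false
//   ParseSimpleMemberName("{GetFoo(),nq}", start: 1, end: 12, out noQuotes, out isCallable)
//       returns "GetFoo"; noQuotes == true,  isCallable == true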
internal static string ParseSimpleMemberName(string str, int start, int end, out bool noQuotes, out bool isCallable)
{
Debug.Assert(str != null && start >= 0 && end >= start);
isCallable = false;
noQuotes = false;
// no-quotes suffix:
if (end - 3 >= start && str[end - 2] == 'n' && str[end - 1] == 'q')
{
int j = end - 3;
while (j >= start && Char.IsWhiteSpace(str[j]))
{
j--;
}
if (j >= start && str[j] == ',')
{
noQuotes = true;
end = j;
}
}
int i = end - 1;
EatTrailingWhiteSpace(str, start, ref i);
if (i > start && str[i] == ')')
{
int closingParen = i;
i--;
EatTrailingWhiteSpace(str, start, ref i);
if (str[i] != '(')
{
i = closingParen;
}
else
{
i--;
EatTrailingWhiteSpace(str, start, ref i);
isCallable = true;
}
}
EatLeadingWhiteSpace(str, ref start, i);
return str.Substring(start, i - start + 1);
}
private static void EatTrailingWhiteSpace(string str, int start, ref int i)
{
while (i >= start && Char.IsWhiteSpace(str[i]))
{
i--;
}
}
private static void EatLeadingWhiteSpace(string str, ref int i, int end)
{
while (i < end && Char.IsWhiteSpace(str[i]))
{
i++;
}
}
#endregion
}
}
}
| |
using Lucene.Net.Codecs;
using Lucene.Net.Documents;
using Lucene.Net.Support;
using Lucene.Net.Support.Threading;
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using Console = Lucene.Net.Support.SystemConsole;
namespace Lucene.Net.Index
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
using Codec = Lucene.Net.Codecs.Codec;
using Directory = Lucene.Net.Store.Directory;
using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
using Document = Documents.Document;
using Field = Field;
using FieldType = FieldType;
using FilterCodec = Lucene.Net.Codecs.FilterCodec;
using IOUtils = Lucene.Net.Util.IOUtils;
using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException;
using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
using PhraseQuery = Lucene.Net.Search.PhraseQuery;
using PostingsFormat = Lucene.Net.Codecs.PostingsFormat;
using Pulsing41PostingsFormat = Lucene.Net.Codecs.Pulsing.Pulsing41PostingsFormat;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using StringField = StringField;
using TestUtil = Lucene.Net.Util.TestUtil;
using TextField = TextField;
[TestFixture]
public class TestAddIndexes : LuceneTestCase
{
[Test]
public virtual void TestSimpleCase()
{
// main directory
Directory dir = NewDirectory();
// two auxiliary directories
Directory aux = NewDirectory();
Directory aux2 = NewDirectory();
IndexWriter writer = null;
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
// add 100 documents
AddDocs(writer, 100);
Assert.AreEqual(100, writer.MaxDoc);
writer.Dispose();
TestUtil.CheckIndex(dir);
writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy(false)));
// add 40 documents in separate files
AddDocs(writer, 40);
Assert.AreEqual(40, writer.MaxDoc);
writer.Dispose();
writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
// add 50 documents in compound files
AddDocs2(writer, 50);
Assert.AreEqual(50, writer.MaxDoc);
writer.Dispose();
// test doc count before segments are merged
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
Assert.AreEqual(100, writer.MaxDoc);
writer.AddIndexes(aux, aux2);
Assert.AreEqual(190, writer.MaxDoc);
writer.Dispose();
TestUtil.CheckIndex(dir);
// make sure the old index is correct
VerifyNumDocs(aux, 40);
// make sure the new index is correct
VerifyNumDocs(dir, 190);
// now add another set in.
Directory aux3 = NewDirectory();
writer = NewWriter(aux3, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
// add 40 documents
AddDocs(writer, 40);
Assert.AreEqual(40, writer.MaxDoc);
writer.Dispose();
// test doc count before segments are merged
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
Assert.AreEqual(190, writer.MaxDoc);
writer.AddIndexes(aux3);
Assert.AreEqual(230, writer.MaxDoc);
writer.Dispose();
// make sure the new index is correct
VerifyNumDocs(dir, 230);
VerifyTermDocs(dir, new Term("content", "aaa"), 180);
VerifyTermDocs(dir, new Term("content", "bbb"), 50);
// now fully merge it.
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
writer.ForceMerge(1);
writer.Dispose();
// make sure the new index is correct
VerifyNumDocs(dir, 230);
VerifyTermDocs(dir, new Term("content", "aaa"), 180);
VerifyTermDocs(dir, new Term("content", "bbb"), 50);
// now add a single document
Directory aux4 = NewDirectory();
writer = NewWriter(aux4, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
AddDocs2(writer, 1);
writer.Dispose();
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
Assert.AreEqual(230, writer.MaxDoc);
writer.AddIndexes(aux4);
Assert.AreEqual(231, writer.MaxDoc);
writer.Dispose();
VerifyNumDocs(dir, 231);
VerifyTermDocs(dir, new Term("content", "bbb"), 51);
dir.Dispose();
aux.Dispose();
aux2.Dispose();
aux3.Dispose();
aux4.Dispose();
}
[Test]
public virtual void TestWithPendingDeletes()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
SetUpDirs(dir, aux);
IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
writer.AddIndexes(aux);
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
for (int i = 0; i < 20; i++)
{
Document doc = new Document();
doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO));
doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO));
writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
}
// Deletes one of the 10 added docs, leaving 9:
PhraseQuery q = new PhraseQuery();
q.Add(new Term("content", "bbb"));
q.Add(new Term("content", "14"));
writer.DeleteDocuments(q);
writer.ForceMerge(1);
writer.Commit();
VerifyNumDocs(dir, 1039);
VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
VerifyTermDocs(dir, new Term("content", "bbb"), 9);
writer.Dispose();
dir.Dispose();
aux.Dispose();
}
[Test]
public virtual void TestWithPendingDeletes2()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
SetUpDirs(dir, aux);
IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
for (int i = 0; i < 20; i++)
{
Document doc = new Document();
doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO));
doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO));
writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
}
writer.AddIndexes(aux);
// Deletes one of the 10 added docs, leaving 9:
PhraseQuery q = new PhraseQuery();
q.Add(new Term("content", "bbb"));
q.Add(new Term("content", "14"));
writer.DeleteDocuments(q);
writer.ForceMerge(1);
writer.Commit();
VerifyNumDocs(dir, 1039);
VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
VerifyTermDocs(dir, new Term("content", "bbb"), 9);
writer.Dispose();
dir.Dispose();
aux.Dispose();
}
[Test]
public virtual void TestWithPendingDeletes3()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
SetUpDirs(dir, aux);
IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
for (int i = 0; i < 20; i++)
{
Document doc = new Document();
doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO));
doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO));
writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
}
// Deletes one of the 10 added docs, leaving 9:
PhraseQuery q = new PhraseQuery();
q.Add(new Term("content", "bbb"));
q.Add(new Term("content", "14"));
writer.DeleteDocuments(q);
writer.AddIndexes(aux);
writer.ForceMerge(1);
writer.Commit();
VerifyNumDocs(dir, 1039);
VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
VerifyTermDocs(dir, new Term("content", "bbb"), 9);
writer.Dispose();
dir.Dispose();
aux.Dispose();
}
// case 0: add self or exceed maxMergeDocs, expect exception
[Test]
public virtual void TestAddSelf()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
IndexWriter writer = null;
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
// add 100 documents
AddDocs(writer, 100);
Assert.AreEqual(100, writer.MaxDoc);
writer.Dispose();
writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
// add 40 documents in separate files
AddDocs(writer, 40);
writer.Dispose();
writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
AddDocs(writer, 100);
writer.Dispose();
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
try
{
// cannot add self
writer.AddIndexes(aux, dir);
Assert.IsTrue(false);
}
#pragma warning disable 168
catch (System.ArgumentException e)
#pragma warning restore 168
{
Assert.AreEqual(100, writer.MaxDoc);
}
writer.Dispose();
// make sure the index is correct
VerifyNumDocs(dir, 100);
dir.Dispose();
aux.Dispose();
}
// in all the remaining tests, make the doc count of the oldest segment
// in dir large so that it is never merged in addIndexes()
// case 1: no tail segments
[Test]
public virtual void TestNoTailSegments()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
SetUpDirs(dir, aux);
IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4)));
AddDocs(writer, 10);
writer.AddIndexes(aux);
Assert.AreEqual(1040, writer.MaxDoc);
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Dispose();
// make sure the index is correct
VerifyNumDocs(dir, 1040);
dir.Dispose();
aux.Dispose();
}
// case 2: tail segments, invariants hold, no copy
[Test]
public virtual void TestNoCopySegments()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
SetUpDirs(dir, aux);
IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(9).SetMergePolicy(NewLogMergePolicy(4)));
AddDocs(writer, 2);
writer.AddIndexes(aux);
Assert.AreEqual(1032, writer.MaxDoc);
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Dispose();
// make sure the index is correct
VerifyNumDocs(dir, 1032);
dir.Dispose();
aux.Dispose();
}
// case 3: tail segments, invariants hold, copy, invariants hold
[Test]
public virtual void TestNoMergeAfterCopy()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
SetUpDirs(dir, aux);
IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4)));
writer.AddIndexes(aux, new MockDirectoryWrapper(Random(), new RAMDirectory(aux, NewIOContext(Random()))));
Assert.AreEqual(1060, writer.MaxDoc);
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Dispose();
// make sure the index is correct
VerifyNumDocs(dir, 1060);
dir.Dispose();
aux.Dispose();
}
// case 4: tail segments, invariants hold, copy, invariants not hold
[Test]
public virtual void TestMergeAfterCopy()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
SetUpDirs(dir, aux, true);
IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
IndexWriter writer = new IndexWriter(aux, dontMergeConfig);
for (int i = 0; i < 20; i++)
{
writer.DeleteDocuments(new Term("id", "" + i));
}
writer.Dispose();
IndexReader reader = DirectoryReader.Open(aux);
Assert.AreEqual(10, reader.NumDocs);
reader.Dispose();
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(4).SetMergePolicy(NewLogMergePolicy(4)));
if (VERBOSE)
{
Console.WriteLine("\nTEST: now addIndexes");
}
writer.AddIndexes(aux, new MockDirectoryWrapper(Random(), new RAMDirectory(aux, NewIOContext(Random()))));
Assert.AreEqual(1020, writer.MaxDoc);
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Dispose();
dir.Dispose();
aux.Dispose();
}
// case 5: tail segments, invariants not hold
[Test]
public virtual void TestMoreMerges()
{
// main directory
Directory dir = NewDirectory();
// auxiliary directory
Directory aux = NewDirectory();
Directory aux2 = NewDirectory();
SetUpDirs(dir, aux, true);
IndexWriter writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(10)));
writer.AddIndexes(aux);
Assert.AreEqual(30, writer.MaxDoc);
Assert.AreEqual(3, writer.SegmentCount);
writer.Dispose();
IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(aux, dontMergeConfig);
for (int i = 0; i < 27; i++)
{
writer.DeleteDocuments(new Term("id", "" + i));
}
writer.Dispose();
IndexReader reader = DirectoryReader.Open(aux);
Assert.AreEqual(3, reader.NumDocs);
reader.Dispose();
dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(aux2, dontMergeConfig);
for (int i = 0; i < 8; i++)
{
writer.DeleteDocuments(new Term("id", "" + i));
}
writer.Dispose();
reader = DirectoryReader.Open(aux2);
Assert.AreEqual(22, reader.NumDocs);
reader.Dispose();
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(6).SetMergePolicy(NewLogMergePolicy(4)));
writer.AddIndexes(aux, aux2);
Assert.AreEqual(1040, writer.MaxDoc);
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Dispose();
dir.Dispose();
aux.Dispose();
aux2.Dispose();
}
private IndexWriter NewWriter(Directory dir, IndexWriterConfig conf)
{
conf.SetMergePolicy(new LogDocMergePolicy());
IndexWriter writer = new IndexWriter(dir, conf);
return writer;
}
private void AddDocs(IndexWriter writer, int numDocs)
{
for (int i = 0; i < numDocs; i++)
{
Document doc = new Document();
doc.Add(NewTextField("content", "aaa", Field.Store.NO));
writer.AddDocument(doc);
}
}
private void AddDocs2(IndexWriter writer, int numDocs)
{
for (int i = 0; i < numDocs; i++)
{
Document doc = new Document();
doc.Add(NewTextField("content", "bbb", Field.Store.NO));
writer.AddDocument(doc);
}
}
private void VerifyNumDocs(Directory dir, int numDocs)
{
IndexReader reader = DirectoryReader.Open(dir);
Assert.AreEqual(numDocs, reader.MaxDoc);
Assert.AreEqual(numDocs, reader.NumDocs);
reader.Dispose();
}
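// Asserts that exactly numDocs documents in dir contain the given term, by walking the term's postings.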
private void VerifyTermDocs(Directory dir, Term term, int numDocs)
{
IndexReader reader = DirectoryReader.Open(dir);
DocsEnum docsEnum = TestUtil.Docs(Random(), reader, term.Field, term.Bytes, null, null, DocsFlags.NONE);
int count = 0;
while (docsEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
{
count++;
}
Assert.AreEqual(numDocs, count);
reader.Dispose();
}
private void SetUpDirs(Directory dir, Directory aux)
{
SetUpDirs(dir, aux, false);
}
private void SetUpDirs(Directory dir, Directory aux, bool withID)
{
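// After setup: dir holds 1000 "aaa" docs in a single segment, and aux holds 30 docs spread
// across 3 segments of 10 docs each (tagged with an "id" field when withID is true).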
IndexWriter writer = null;
writer = NewWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000));
// add 1000 documents in 1 segment
if (withID)
{
AddDocsWithID(writer, 1000, 0);
}
else
{
AddDocs(writer, 1000);
}
Assert.AreEqual(1000, writer.MaxDoc);
Assert.AreEqual(1, writer.SegmentCount);
writer.Dispose();
writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10)));
// add 30 documents in 3 segments
for (int i = 0; i < 3; i++)
{
if (withID)
{
AddDocsWithID(writer, 10, 10 * i);
}
else
{
AddDocs(writer, 10);
}
writer.Dispose();
writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10)));
}
Assert.AreEqual(30, writer.MaxDoc);
Assert.AreEqual(3, writer.SegmentCount);
writer.Dispose();
}
// LUCENE-1270
[Test]
public virtual void TestHangOnClose()
{
Directory dir = NewDirectory();
LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
lmp.NoCFSRatio = 0.0;
lmp.MergeFactor = 100;
IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(5).SetMergePolicy(lmp));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.StoreTermVectors = true;
customType.StoreTermVectorPositions = true;
customType.StoreTermVectorOffsets = true;
doc.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType));
for (int i = 0; i < 60; i++)
{
writer.AddDocument(doc);
}
Document doc2 = new Document();
FieldType customType2 = new FieldType();
customType2.IsStored = true;
doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
for (int i = 0; i < 10; i++)
{
writer.AddDocument(doc2);
}
writer.Dispose();
Directory dir2 = NewDirectory();
lmp = new LogByteSizeMergePolicy();
lmp.MinMergeMB = 0.0001;
lmp.NoCFSRatio = 0.0;
lmp.MergeFactor = 4;
writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(lmp));
writer.AddIndexes(dir);
writer.Dispose();
dir.Dispose();
dir2.Dispose();
}
// TODO: these are also in TestIndexWriter... add a simple doc-writing method
// like this to LuceneTestCase?
private void AddDoc(IndexWriter writer)
{
Document doc = new Document();
doc.Add(NewTextField("content", "aaa", Field.Store.NO));
writer.AddDocument(doc);
}
private abstract class RunAddIndexesThreads
{
private readonly TestAddIndexes OuterInstance;
internal Directory Dir, Dir2;
internal const int NUM_INIT_DOCS = 17;
internal IndexWriter Writer2;
internal readonly IList<Exception> Failures = new List<Exception>();
internal volatile bool DidClose;
internal readonly IndexReader[] Readers;
internal readonly int NUM_COPY;
internal const int NUM_THREADS = 5;
internal readonly ThreadClass[] Threads = new ThreadClass[NUM_THREADS];
public RunAddIndexesThreads(TestAddIndexes outerInstance, int numCopy)
{
this.OuterInstance = outerInstance;
NUM_COPY = numCopy;
Dir = new MockDirectoryWrapper(Random(), new RAMDirectory());
IndexWriter writer = new IndexWriter(Dir, (IndexWriterConfig)new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++)
{
outerInstance.AddDoc(writer);
}
writer.Dispose();
Dir2 = NewDirectory();
Writer2 = new IndexWriter(Dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
Writer2.Commit();
Readers = new IndexReader[NUM_COPY];
for (int i = 0; i < NUM_COPY; i++)
{
Readers[i] = DirectoryReader.Open(Dir);
}
}
internal virtual void LaunchThreads(int numIter)
{
for (int i = 0; i < NUM_THREADS; i++)
{
Threads[i] = new ThreadAnonymousInnerClassHelper(this, numIter);
}
for (int i = 0; i < NUM_THREADS; i++)
{
Threads[i].Start();
}
}
private class ThreadAnonymousInnerClassHelper : ThreadClass
{
private readonly RunAddIndexesThreads OuterInstance;
private int NumIter;
public ThreadAnonymousInnerClassHelper(RunAddIndexesThreads outerInstance, int numIter)
{
this.OuterInstance = outerInstance;
this.NumIter = numIter;
}
public override void Run()
{
try
{
Directory[] dirs = new Directory[OuterInstance.NUM_COPY];
for (int k = 0; k < OuterInstance.NUM_COPY; k++)
{
dirs[k] = new MockDirectoryWrapper(Random(), new RAMDirectory(OuterInstance.Dir, NewIOContext(Random())));
}
int j = 0;
while (true)
{
// System.out.println(Thread.currentThread().getName() + ": iter j=" + j);
if (NumIter > 0 && j == NumIter)
{
break;
}
OuterInstance.DoBody(j++, dirs);
}
}
catch (Exception t)
{
OuterInstance.Handle(t);
}
}
}
internal virtual void JoinThreads()
{
for (int i = 0; i < NUM_THREADS; i++)
{
Threads[i].Join();
}
}
internal virtual void Close(bool doWait)
{
DidClose = true;
Writer2.Dispose(doWait);
}
internal virtual void CloseDir()
{
for (int i = 0; i < NUM_COPY; i++)
{
Readers[i].Dispose();
}
Dir2.Dispose();
}
internal abstract void DoBody(int j, Directory[] dirs);
internal abstract void Handle(Exception t);
}
private class CommitAndAddIndexes : RunAddIndexesThreads
{
private readonly TestAddIndexes OuterInstance;
public CommitAndAddIndexes(TestAddIndexes outerInstance, int numCopy)
: base(outerInstance, numCopy)
{
this.OuterInstance = outerInstance;
}
internal override void Handle(Exception t)
{
Console.Error.WriteLine(t.StackTrace);
lock (Failures)
{
Failures.Add(t);
}
}
internal override void DoBody(int j, Directory[] dirs)
{
switch (j % 5)
{
case 0:
if (VERBOSE)
{
Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[]) then full merge");
}
Writer2.AddIndexes(dirs);
Writer2.ForceMerge(1);
break;
case 1:
if (VERBOSE)
{
Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[])");
}
Writer2.AddIndexes(dirs);
break;
case 2:
if (VERBOSE)
{
Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(IndexReader[])");
}
Writer2.AddIndexes(Readers);
break;
case 3:
if (VERBOSE)
{
Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[]) then maybeMerge");
}
Writer2.AddIndexes(dirs);
Writer2.MaybeMerge();
break;
case 4:
if (VERBOSE)
{
Console.WriteLine(Thread.CurrentThread.Name + ": TEST: commit");
}
Writer2.Commit();
break;
}
}
}
// LUCENE-1335: test simultaneous addIndexes & commits
// from multiple threads
[Test]
public virtual void TestAddIndexesWithThreads()
{
int NUM_ITER = TEST_NIGHTLY ? 15 : 5;
const int NUM_COPY = 3;
CommitAndAddIndexes c = new CommitAndAddIndexes(this, NUM_COPY);
c.LaunchThreads(NUM_ITER);
for (int i = 0; i < 100; i++)
{
AddDoc(c.Writer2);
}
c.JoinThreads();
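// Each thread runs NUM_ITER iterations of DoBody; 4 out of every 5 iterations (cases 0-3) call
// AddIndexes with NUM_COPY copies of a source index holding NUM_INIT_DOCS docs, and the loop above
// adds 100 docs directly, which gives the expected document count computed below.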
int expectedNumDocs = 100 + NUM_COPY * (4 * NUM_ITER / 5) * RunAddIndexesThreads.NUM_THREADS * RunAddIndexesThreads.NUM_INIT_DOCS;
Assert.AreEqual(expectedNumDocs, c.Writer2.NumDocs, "expected num docs don't match - failures: " + Environment.NewLine
+ string.Join(Environment.NewLine, c.Failures.Select(x => x.ToString())));
c.Close(true);
Assert.IsTrue(c.Failures.Count == 0, "found unexpected failures: " + c.Failures);
IndexReader reader = DirectoryReader.Open(c.Dir2);
Assert.AreEqual(expectedNumDocs, reader.NumDocs);
reader.Dispose();
c.CloseDir();
}
private class CommitAndAddIndexes2 : CommitAndAddIndexes
{
private readonly TestAddIndexes OuterInstance;
public CommitAndAddIndexes2(TestAddIndexes outerInstance, int numCopy)
: base(outerInstance, numCopy)
{
this.OuterInstance = outerInstance;
}
internal override void Handle(Exception t)
{
if (!(t is ObjectDisposedException) && !(t is System.NullReferenceException))
{
Console.Error.WriteLine(t.StackTrace);
lock (Failures)
{
Failures.Add(t);
}
}
}
}
// LUCENE-1335: test simultaneous addIndexes & close
[Test]
public virtual void TestAddIndexesWithClose()
{
const int NUM_COPY = 3;
CommitAndAddIndexes2 c = new CommitAndAddIndexes2(this, NUM_COPY);
//c.writer2.setInfoStream(System.out);
c.LaunchThreads(-1);
// Close w/o first stopping/joining the threads
c.Close(true);
//c.writer2.Dispose();
c.JoinThreads();
c.CloseDir();
Assert.IsTrue(c.Failures.Count == 0);
}
private class CommitAndAddIndexes3 : RunAddIndexesThreads
{
private readonly TestAddIndexes OuterInstance;
public CommitAndAddIndexes3(TestAddIndexes outerInstance, int numCopy)
: base(outerInstance, numCopy)
{
this.OuterInstance = outerInstance;
}
internal override void DoBody(int j, Directory[] dirs)
{
switch (j % 5)
{
case 0:
if (VERBOSE)
{
Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes + full merge");
}
Writer2.AddIndexes(dirs);
Writer2.ForceMerge(1);
break;
case 1:
if (VERBOSE)
{
Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes");
}
Writer2.AddIndexes(dirs);
break;
case 2:
if (VERBOSE)
{
Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes(IR[])");
}
Writer2.AddIndexes(Readers);
break;
case 3:
if (VERBOSE)
{
Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": full merge");
}
Writer2.ForceMerge(1);
break;
case 4:
if (VERBOSE)
{
Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": commit");
}
Writer2.Commit();
break;
}
}
internal override void Handle(Exception t)
{
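// Exceptions caused by the writer being closed mid-operation (disposed writer, aborted merges,
// missing index files) are expected once Close() has been called; only report them as test
// failures if the writer had not been closed yet.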
bool report = true;
if (t is ObjectDisposedException || t is MergePolicy.MergeAbortedException || t is System.NullReferenceException)
{
report = !DidClose;
}
// LUCENENET specific - since NoSuchDirectoryException subclasses FileNotFoundException
// in Lucene, we need to handle it here to be on the safe side.
else if (t is FileNotFoundException/* || t is NoSuchFileException*/ || t is DirectoryNotFoundException)
{
report = !DidClose;
}
else if (t is IOException)
{
Exception t2 = t.InnerException;
if (t2 is MergePolicy.MergeAbortedException)
{
report = !DidClose;
}
}
if (report)
{
Console.Out.WriteLine(t.StackTrace);
lock (Failures)
{
Failures.Add(t);
}
}
}
}
// LUCENE-1335: test simultaneous addIndexes & close
[Test]
public virtual void TestAddIndexesWithCloseNoWait()
{
const int NUM_COPY = 50;
CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY);
c.LaunchThreads(-1);
Thread.Sleep(TestUtil.NextInt(Random(), 10, 500));
// Close w/o first stopping/joining the threads
if (VERBOSE)
{
Console.WriteLine("TEST: now close(false)");
}
c.Close(false);
c.JoinThreads();
if (VERBOSE)
{
Console.WriteLine("TEST: done join threads");
}
c.CloseDir();
Assert.IsTrue(c.Failures.Count == 0);
}
// LUCENE-1335: test simultaneous addIndexes & close
[Test]
public virtual void TestAddIndexesWithRollback()
{
int NUM_COPY = TEST_NIGHTLY ? 50 : 5;
CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY);
c.LaunchThreads(-1);
Thread.Sleep(TestUtil.NextInt(Random(), 10, 500));
// Close w/o first stopping/joining the threads
if (VERBOSE)
{
Console.WriteLine("TEST: now force rollback");
}
c.DidClose = true;
c.Writer2.Rollback();
c.JoinThreads();
c.CloseDir();
Assert.IsTrue(c.Failures.Count == 0);
}
// LUCENE-2996: tests that addIndexes(IndexReader) applies existing deletes correctly.
[Test]
public virtual void TestExistingDeletes()
{
Directory[] dirs = new Directory[2];
for (int i = 0; i < dirs.Length; i++)
{
dirs[i] = NewDirectory();
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
IndexWriter writer = new IndexWriter(dirs[i], conf);
Document doc = new Document();
doc.Add(new StringField("id", "myid", Field.Store.NO));
writer.AddDocument(doc);
writer.Dispose();
}
IndexWriterConfig conf_ = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
IndexWriter writer_ = new IndexWriter(dirs[0], conf_);
// Now delete the document
writer_.DeleteDocuments(new Term("id", "myid"));
IndexReader r = DirectoryReader.Open(dirs[1]);
try
{
writer_.AddIndexes(r);
}
finally
{
r.Dispose();
}
writer_.Commit();
Assert.AreEqual(1, writer_.NumDocs, "Documents from the incoming index should not have been deleted");
writer_.Dispose();
foreach (Directory dir in dirs)
{
dir.Dispose();
}
}
// just like addDocs but with ID, starting from docStart
private void AddDocsWithID(IndexWriter writer, int numDocs, int docStart)
{
for (int i = 0; i < numDocs; i++)
{
Document doc = new Document();
doc.Add(NewTextField("content", "aaa", Field.Store.NO));
doc.Add(NewTextField("id", "" + (docStart + i), Field.Store.YES));
writer.AddDocument(doc);
}
}
[Test]
public virtual void TestSimpleCaseCustomCodec()
{
// main directory
Directory dir = NewDirectory();
// two auxiliary directories
Directory aux = NewDirectory();
Directory aux2 = NewDirectory();
Codec codec = new CustomPerFieldCodec();
IndexWriter writer = null;
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetCodec(codec));
// add 100 documents
AddDocsWithID(writer, 100, 0);
Assert.AreEqual(100, writer.MaxDoc);
writer.Commit();
writer.Dispose();
TestUtil.CheckIndex(dir);
writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetCodec(codec).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false)));
// add 40 documents in separate files
AddDocs(writer, 40);
Assert.AreEqual(40, writer.MaxDoc);
writer.Commit();
writer.Dispose();
writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetCodec(codec));
// add 50 documents in compound files
AddDocs2(writer, 50);
Assert.AreEqual(50, writer.MaxDoc);
writer.Commit();
writer.Dispose();
// test doc count before segments are merged
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetCodec(codec));
Assert.AreEqual(100, writer.MaxDoc);
writer.AddIndexes(aux, aux2);
Assert.AreEqual(190, writer.MaxDoc);
writer.Dispose();
dir.Dispose();
aux.Dispose();
aux2.Dispose();
}
private sealed class CustomPerFieldCodec : Lucene46Codec
{
internal readonly PostingsFormat SimpleTextFormat;
internal readonly PostingsFormat DefaultFormat;
internal readonly PostingsFormat MockSepFormat;
public CustomPerFieldCodec()
{
SimpleTextFormat = Codecs.PostingsFormat.ForName("SimpleText");
DefaultFormat = Codecs.PostingsFormat.ForName("Lucene41");
MockSepFormat = Codecs.PostingsFormat.ForName("MockSep");
}
public override PostingsFormat GetPostingsFormatForField(string field)
{
if (field.Equals("id"))
{
return SimpleTextFormat;
}
else if (field.Equals("content"))
{
return MockSepFormat;
}
else
{
return DefaultFormat;
}
}
}
// LUCENE-2790: tests that the non CFS files were deleted by addIndexes
[Test]
public virtual void TestNonCFSLeftovers()
{
Directory[] dirs = new Directory[2];
for (int i = 0; i < dirs.Length; i++)
{
dirs[i] = new RAMDirectory();
IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
Document d = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.StoreTermVectors = true;
d.Add(new Field("c", "v", customType));
w.AddDocument(d);
w.Dispose();
}
IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dirs[0]), DirectoryReader.Open(dirs[1]) };
Directory dir = new MockDirectoryWrapper(Random(), new RAMDirectory());
IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NewLogMergePolicy(true));
MergePolicy lmp = conf.MergePolicy;
// Force creation of CFS:
lmp.NoCFSRatio = 1.0;
lmp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
IndexWriter w3 = new IndexWriter(dir, conf);
w3.AddIndexes(readers);
w3.Dispose();
// we should now see segments_X,
// segments.gen, _Y.cfs, _Y.cfe, _Z.si
Assert.AreEqual(5, dir.ListAll().Length, "Only one compound segment should exist, but got: " + Arrays.ToString(dir.ListAll()));
dir.Dispose();
}
[CodecName("NotRegistered")]
private sealed class UnRegisteredCodec : FilterCodec
{
public UnRegisteredCodec()
: base(new Lucene46Codec())
{
}
}
/*
* simple test that ensures we getting expected exceptions
*/
[Test]
public virtual void TestAddIndexMissingCodec()
{
BaseDirectoryWrapper toAdd = NewDirectory();
// Disable checkIndex, else we get an exception because
// of the unregistered codec:
toAdd.CheckIndexOnClose = false;
{
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
conf.SetCodec(new UnRegisteredCodec());
using (var w = new IndexWriter(toAdd, conf))
{
Document doc = new Document();
FieldType customType = new FieldType();
customType.IsIndexed = true;
doc.Add(NewField("foo", "bar", customType));
w.AddDocument(doc);
}
}
{
using (Directory dir = NewDirectory())
{
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
conf.SetCodec(TestUtil.AlwaysPostingsFormat(new Pulsing41PostingsFormat(1 + Random().Next(20))));
IndexWriter w = new IndexWriter(dir, conf);
try
{
w.AddIndexes(toAdd);
Assert.Fail("no such codec");
}
#pragma warning disable 168
catch (System.ArgumentException ex)
#pragma warning restore 168
{
// expected
}
finally
{
w.Dispose();
}
using (IndexReader open = DirectoryReader.Open(dir))
{
Assert.AreEqual(0, open.NumDocs);
}
}
}
try
{
DirectoryReader.Open(toAdd);
Assert.Fail("no such codec");
}
#pragma warning disable 168
catch (System.ArgumentException ex)
#pragma warning restore 168
{
// expected
}
toAdd.Dispose();
}
// LUCENE-3575
[Test]
public virtual void TestFieldNamesChanged()
{
Directory d1 = NewDirectory();
RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
Document doc = new Document();
doc.Add(NewStringField("f1", "doc1 field1", Field.Store.YES));
doc.Add(NewStringField("id", "1", Field.Store.YES));
w.AddDocument(doc);
IndexReader r1 = w.Reader;
w.Dispose();
Directory d2 = NewDirectory();
w = new RandomIndexWriter(Random(), d2, Similarity, TimeZone);
doc = new Document();
doc.Add(NewStringField("f2", "doc2 field2", Field.Store.YES));
doc.Add(NewStringField("id", "2", Field.Store.YES));
w.AddDocument(doc);
IndexReader r2 = w.Reader;
w.Dispose();
Directory d3 = NewDirectory();
w = new RandomIndexWriter(Random(), d3, Similarity, TimeZone);
w.AddIndexes(r1, r2);
r1.Dispose();
d1.Dispose();
r2.Dispose();
d2.Dispose();
IndexReader r3 = w.Reader;
w.Dispose();
Assert.AreEqual(2, r3.NumDocs);
for (int docID = 0; docID < 2; docID++)
{
Document d = r3.Document(docID);
if (d.Get("id").Equals("1"))
{
Assert.AreEqual("doc1 field1", d.Get("f1"));
}
else
{
Assert.AreEqual("doc2 field2", d.Get("f2"));
}
}
r3.Dispose();
d3.Dispose();
}
[Test]
public virtual void TestAddEmpty()
{
Directory d1 = NewDirectory();
RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
MultiReader empty = new MultiReader();
w.AddIndexes(empty);
w.Dispose();
DirectoryReader dr = DirectoryReader.Open(d1);
foreach (AtomicReaderContext ctx in dr.Leaves)
{
Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
}
dr.Dispose();
d1.Dispose();
}
// Currently it's impossible to end up with a segment with all documents
// deleted, as such segments are dropped. Still, to validate that addIndexes
// works with such segments, or readers that end up in such a state, we fake an
// all deleted segment.
[Test]
public virtual void TestFakeAllDeleted()
{
Directory src = NewDirectory(), dest = NewDirectory();
RandomIndexWriter w = new RandomIndexWriter(Random(), src, Similarity, TimeZone);
w.AddDocument(new Document());
IndexReader allDeletedReader = new AllDeletedFilterReader((AtomicReader)w.Reader.Leaves[0].Reader);
w.Dispose();
w = new RandomIndexWriter(Random(), dest, Similarity, TimeZone);
w.AddIndexes(allDeletedReader);
w.Dispose();
DirectoryReader dr = DirectoryReader.Open(src);
foreach (AtomicReaderContext ctx in dr.Leaves)
{
Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
}
dr.Dispose();
allDeletedReader.Dispose();
src.Dispose();
dest.Dispose();
}
/// <summary>
/// Make sure an open IndexWriter on an incoming Directory
/// causes a LockObtainFailedException
/// </summary>
[Test]
public virtual void TestLocksBlock()
{
Directory src = NewDirectory();
RandomIndexWriter w1 = new RandomIndexWriter(Random(), src, Similarity, TimeZone);
w1.AddDocument(new Document());
w1.Commit();
Directory dest = NewDirectory();
IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
iwc.SetWriteLockTimeout(1);
RandomIndexWriter w2 = new RandomIndexWriter(Random(), dest, iwc);
try
{
w2.AddIndexes(src);
Assert.Fail("did not hit expected exception");
}
#pragma warning disable 168
catch (LockObtainFailedException lofe)
#pragma warning restore 168
{
// expected
}
IOUtils.Dispose(w1, w2, src, dest);
}
}
}
| |
using Akka.Actor;
using Akka.IO;
using System;
using System.IO;
using System.Text;
// Why the hell did they mark some of the most useful methods on ByteString as obsolete?
#pragma warning disable CS0618
namespace AKDK.Actors.Streaming
{
using Messages;
using Utilities;
/// <summary>
/// Actor that reads lines from a stream.
/// </summary>
/// <remarks>
/// This actor is only necessary until Akka.Streams for netstandard includes support for sourcing data from streams.
/// </remarks>
public sealed class StreamLines
: ReceiveActorEx
{
/// <summary>
/// The default name for instances of the <see cref="StreamLines"/> actor.
/// </summary>
public static readonly string ActorName = "stream-lines";
/// <summary>
/// The string representing a Windows-style line terminator (CRLF).
/// </summary>
static readonly string WindowsNewLine = "\r\n";
/// <summary>
/// The string representing a Unix-style line terminator (LF).
/// </summary>
static readonly string UnixNewLine = "\n";
/// <summary>
/// <see cref="Props"/> used to create the <see cref="ReadStream"/> actor for reading from the underlying stream.
/// </summary>
Props _readStreamProps;
/// <summary>
/// The <see cref="ReadStream"/> actor used to read from the underlying stream.
/// </summary>
IActorRef _readStream;
/// <summary>
/// Create a new <see cref="StreamLines"/> actor.
/// </summary>
/// <param name="correlationId">
/// The message correlation Id that will be sent with the stream data.
/// </param>
/// <param name="owner">
/// The actor that owns the <see cref="StreamLines"/> actor (this actor will receive the stream data).
/// </param>
/// <param name="stream">
/// The <see cref="Stream"/> to read from.
/// </param>
/// <param name="encoding">
/// The expected stream encoding.
/// </param>
/// <param name="bufferSize">
/// The buffer size to use when reading from the stream.
/// </param>
/// <param name="windowsLineEndings">
/// Expect Windows-style line endings (CRLF) instead of Unix-style line endings (LF)?
/// </param>
public StreamLines(string correlationId, IActorRef owner, Stream stream, Encoding encoding, int bufferSize, bool windowsLineEndings)
{
if (String.IsNullOrWhiteSpace(correlationId))
throw new ArgumentException($"Argument cannot be null, empty, or entirely composed of whitespace: {nameof(correlationId)}.", nameof(correlationId));
if (owner == null)
throw new ArgumentNullException(nameof(owner));
if (stream == null)
throw new ArgumentNullException(nameof(stream));
if (encoding == null)
throw new ArgumentNullException(nameof(encoding));
_readStreamProps = ReadStream.Create(correlationId, Self, stream, bufferSize);
ByteString lineEnding = ByteString.FromString(windowsLineEndings ? WindowsNewLine : UnixNewLine, encoding);
ByteString buffer = ByteString.Empty;
bool isEndOfStream = false;
Receive<ReadStream.StreamData>(streamData =>
{
isEndOfStream = streamData.IsEndOfStream;
buffer += streamData.Data;
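// Carve complete lines out of the accumulated buffer. A partial line stays buffered until its
// terminator arrives in a later chunk; at end-of-stream, whatever remains is published as a
// final (unterminated) line.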
while (buffer.Count > 0)
{
int lineEndingIndex = buffer.IndexOf(lineEnding);
if (lineEndingIndex == -1)
{
if (isEndOfStream)
lineEndingIndex = buffer.Count; // Publish whatever we have left.
else
break; // Wait for the rest of the current line to arrive.
}
var (left, right) = buffer.SplitAt(lineEndingIndex);
buffer = right.Drop(lineEnding.Count);
ByteString lineData = left.Compact(); // If we don't compact the ByteString before decoding, then it feeds chunks to the encoder that are invalid for Encoding.Unicode.
string lineText = lineData.ToString(encoding);
System.Diagnostics.Debug.WriteLine($"StreamLines:PublishLine - '{lineText}'");
owner.Tell(
new StreamLine(streamData.CorrelationId,
line: lineText
)
);
}
if (isEndOfStream)
{
owner.Tell(
new EndOfStream(correlationId)
);
Context.Stop(Self);
}
});
Receive<ReadStream.StreamError>(error =>
{
owner.Tell(error);
});
Receive<ReadStream.Close>(close =>
{
_readStream.Forward(close);
});
Receive<Terminated>(terminated =>
{
if (terminated.ActorRef == _readStream)
{
if (!isEndOfStream)
{
owner.Tell(
new EndOfStream(correlationId)
);
}
}
else
Unhandled(terminated);
});
}
/// <summary>
/// Called when the actor is started.
/// </summary>
protected override void PreStart()
{
base.PreStart();
_readStream = Context.ActorOf(_readStreamProps, "read-stream");
// Raise end-of-stream if source actor dies.
Context.Watch(_readStream);
}
/// <summary>
/// Generate <see cref="Props"/> to create a new <see cref="StreamLines"/> actor.
/// </summary>
/// <param name="correlationId">
/// The message correlation Id that will be sent with the stream data.
/// </param>
/// <param name="owner">
/// The actor that owns the <see cref="StreamLines"/> actor (this actor will receive the stream data).
/// </param>
/// <param name="stream">
/// The <see cref="Stream"/> to read from.
/// </param>
/// <param name="encoding">
/// The expected stream encoding.
/// </param>
/// <param name="bufferSize">
/// The buffer size to use when reading from the stream.
/// </param>
/// <param name="windowsLineEndings">
/// Expect Windows-style line endings (CRLF) instead of Unix-style line endings (LF)?
/// </param>
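/// <example>
/// A minimal usage sketch (illustrative only; the owning actor, <c>logStream</c>, and chosen encoding are assumptions, not part of this type):
/// <code>
/// // From inside the owning actor:
/// Props streamLinesProps = StreamLines.Create("my-correlation-id", Self, logStream, Encoding.UTF8);
/// IActorRef lineReader = Context.ActorOf(streamLinesProps, StreamLines.ActorName);
/// // The owner then receives one StreamLines.StreamLine message per line, followed by StreamLines.EndOfStream.
/// </code>
/// </example>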
public static Props Create(string correlationId, IActorRef owner, Stream stream, Encoding encoding, int bufferSize = ReadStream.DefaultBufferSize, bool windowsLineEndings = false)
{
if (encoding == null)
encoding = Encoding.Unicode;
return Props.Create(
() => new StreamLines(correlationId, owner, stream, encoding, bufferSize, windowsLineEndings)
);
}
/// <summary>
/// Represents a line of text from a stream (without the line terminator).
/// </summary>
public class StreamLine
: CorrelatedMessage
{
/// <summary>
/// Create a new <see cref="StreamLine"/> message.
/// </summary>
/// <param name="correlationId">
/// The message correlation Id.
/// </param>
/// <param name="line">
/// The line of text.
/// </param>
public StreamLine(string correlationId, string line)
: base(correlationId)
{
Line = line;
}
/// <summary>
/// The line of text.
/// </summary>
public string Line { get; }
}
/// <summary>
/// Represents the end of a stream.
/// </summary>
public class EndOfStream
{
/// <summary>
/// Create a new <see cref="EndOfStream"/> message.
/// </summary>
/// <param name="correlationId">
/// The message correlation Id.
/// </param>
public EndOfStream(string correlationId)
{
CorrelationId = correlationId;
}
/// <summary>
/// The message correlation Id.
/// </summary>
public string CorrelationId { get; }
}
}
}
| |
/*
Copyright (c) 2003-2006 Niels Kokholm and Peter Sestoft
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
using System;
using RazorDBx.C5;
using NUnit.Framework;
using SCG = System.Collections.Generic;
namespace C5UnitTests.SortingTests
{
[TestFixture]
public class SortRandom
{
IC ic;
Random ran;
int[] a;
int length;
[SetUp]
public void Init ()
{
ic = new IC ();
ran = new Random (3456);
length = 100000;
a = new int[length];
for (int i = 0; i < length; i++)
a [i] = ran.Next ();
}
[Test]
public void HeapSort ()
{
Sorting.HeapSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[Test]
public void IntroSort ()
{
Sorting.IntroSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[Test]
public void InsertionSort ()
{
length = 1000;
Sorting.InsertionSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
Sorting.InsertionSort<int> (a, length, 2 * length, ic);
for (int i = length + 1; i < 2 * length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[TearDown]
public void Dispose ()
{
ic = null;
}
}
[TestFixture]
public class SortRandomDuplicates
{
IC ic;
Random ran;
int[] a;
int length;
[SetUp]
public void Init ()
{
ic = new IC ();
ran = new Random (3456);
length = 100000;
a = new int[length];
for (int i = 0; i < length; i++)
a [i] = ran.Next (3, 23);
}
[Test]
public void HeapSort ()
{
Sorting.HeapSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[Test]
public void IntroSort ()
{
Sorting.IntroSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[Test]
public void InsertionSort ()
{
length = 1000;
Sorting.InsertionSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
Sorting.InsertionSort<int> (a, length, 2 * length, ic);
for (int i = length + 1; i < 2 * length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[TearDown]
public void Dispose ()
{
ic = null;
a = null;
ran = null;
}
}
[TestFixture]
public class SortIncreasing
{
IC ic;
int[] a;
int length;
[SetUp]
public void Init ()
{
ic = new IC ();
length = 100000;
a = new int[length];
for (int i = 0; i < length; i++)
a [i] = i;
}
[Test]
public void HeapSort ()
{
Sorting.HeapSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[Test]
public void IntroSort ()
{
Sorting.IntroSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[Test]
public void InsertionSort ()
{
length = 1000;
Sorting.InsertionSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
Sorting.InsertionSort<int> (a, length, 2 * length, ic);
for (int i = length + 1; i < 2 * length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[TearDown]
public void Dispose ()
{
ic = null;
a = null;
}
}
[TestFixture]
public class SortDecreasing
{
IC ic;
int[] a;
int length;
[SetUp]
public void Init ()
{
ic = new IC ();
length = 100000;
a = new int[length];
for (int i = 0; i < length; i++)
a [i] = -i;
}
[Test]
public void HeapSort ()
{
Sorting.HeapSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[Test]
public void IntroSort ()
{
Sorting.IntroSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[Test]
public void InsertionSort ()
{
length = 1000;
Sorting.InsertionSort<int> (a, 0, length, ic);
for (int i = 1; i < length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
Sorting.InsertionSort<int> (a, length, 2 * length, ic);
for (int i = length + 1; i < 2 * length; i++)
Assert.IsTrue (a [i - 1] <= a [i], "Inversion at " + i);
}
[TearDown]
public void Dispose ()
{
ic = null;
a = null;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using System.IO.MemoryMappedFiles;
using Xunit;
[Collection("CreateNew")]
public class CreateNew : MMFTestBase
{
[Fact]
public static void CreateNewTestCases()
{
bool bResult = false;
CreateNew test = new CreateNew();
try
{
bResult = test.RunTest();
}
catch (Exception exc_main)
{
bResult = false;
Console.WriteLine("FAiL! Error in CreateNew! Uncaught Exception in main(), exc_main==" + exc_main.ToString());
}
Assert.True(bResult, "One or more test cases failed.");
}
public bool RunTest()
{
try
{
////////////////////////////////////////////////////////////////////////
// CreateNew(mapName, capacity)
////////////////////////////////////////////////////////////////////////
// [] mapName
// mapname > 260 chars
VerifyCreateNew("Loc111", "CreateNew" + new String('a', 1000), 4096);
// null
VerifyCreateNew("Loc112", null, 4096);
// empty string disallowed
VerifyCreateNewException<ArgumentException>("Loc113", String.Empty, 4096);
// all whitespace
VerifyCreateNew("Loc114", "\t\t \n\u00A0", 4096);
// MMF with this mapname already exists
using (MemoryMappedFile mmf = MemoryMappedFile.CreateNew("map115", 1000))
{
VerifyCreateNewException<IOException>("Loc115", "map115", 1000);
}
// MMF with this mapname existed, but was closed
VerifyCreateNew("Loc116", "map115", 500);
// "global/" prefix
VerifyCreateNew("Loc117", "global/CN_0", 4096);
// "local/" prefix
VerifyCreateNew("Loc118", "local/CN_1", 4096);
// [] capacity
// >0 capacity
VerifyCreateNew("Loc211", "CN_mapname211", 50);
// 0 capacity
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc211", "CN_mapname211", 0);
// negative
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc213", "CN_mapname213", -1);
// negative
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc214", "CN_mapname214", -4096);
// Int64.MaxValue - cannot exceed local address space
if (IntPtr.Size == 4)
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc215", "CN_mapname215", Int64.MaxValue);
else // 64-bit machine
VerifyCreateNewException<IOException>("Loc215b", "CN_mapname215", Int64.MaxValue); // valid but too large
////////////////////////////////////////////////////////////////////////
// CreateNew(mapName, capacity, MemoryMappedFileAccess)
////////////////////////////////////////////////////////////////////////
// [] access
// Write is disallowed
VerifyCreateNewException<ArgumentException>("Loc330", "CN_mapname330", 1000, MemoryMappedFileAccess.Write);
// valid access
MemoryMappedFileAccess[] accessList = new MemoryMappedFileAccess[] {
MemoryMappedFileAccess.Read,
MemoryMappedFileAccess.ReadWrite,
MemoryMappedFileAccess.CopyOnWrite,
MemoryMappedFileAccess.ReadExecute,
MemoryMappedFileAccess.ReadWriteExecute,
};
foreach (MemoryMappedFileAccess access in accessList)
{
VerifyCreateNew("Loc331_" + access, "CN_mapname331_" + access, 1000, access);
}
// invalid enum value
accessList = new MemoryMappedFileAccess[] {
(MemoryMappedFileAccess)(-1),
(MemoryMappedFileAccess)(6),
};
foreach (MemoryMappedFileAccess access in accessList)
{
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc332_" + ((int)access), "CN_mapname332_" + ((int)access), 1000, access);
}
////////////////////////////////////////////////////////////////////////
// CreateNew(String, long, MemoryMappedFileAccess, MemoryMappedFileOptions,
// MemoryMappedFileSecurity, HandleInheritability)
////////////////////////////////////////////////////////////////////////
// [] mapName
// mapname > 260 chars
VerifyCreateNew("Loc411", "CreateNew2" + new String('a', 1000), 4096, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// null
VerifyCreateNew("Loc412", null, 4096, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// empty string disallowed
VerifyCreateNewException<ArgumentException>("Loc413", String.Empty, 4096, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// all whitespace
VerifyCreateNew("Loc414", "\t\t \n\u00A0", 4096, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// MMF with this mapname already exists
using (MemoryMappedFile mmf = MemoryMappedFile.CreateNew("map415", 4096))
{
VerifyCreateNewException<IOException>("Loc415", "map415", 4096, MemoryMappedFileAccess.Read, MemoryMappedFileOptions.None, HandleInheritability.None);
}
// MMF with this mapname existed, but was closed
VerifyCreateNew("Loc416", "map415", 4096, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// "global/" prefix
VerifyCreateNew("Loc417", "global/CN_2", 4096, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// "local/" prefix
VerifyCreateNew("Loc418", "local/CN_3", 4096, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// [] capacity
// >0 capacity
VerifyCreateNew("Loc421", "CN_mapname421", 50, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// 0 capacity
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc422", "CN_mapname422", 0, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// negative
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc423", "CN_mapname423", -1, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// negative
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc424", "CN_mapname424", -4096, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// Int64.MaxValue - cannot exceed local address space
if (IntPtr.Size == 4)
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc425", "CN_mapname425", Int64.MaxValue, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
else // 64-bit machine
VerifyCreateNewException<IOException>("Loc425b", "CN_mapname425", Int64.MaxValue, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None); // valid but too large
// [] access
// Write is disallowed
VerifyCreateNewException<ArgumentException>("Loc430", "CN_mapname430", 1000, MemoryMappedFileAccess.Write, MemoryMappedFileOptions.None, HandleInheritability.None);
// valid access
accessList = new MemoryMappedFileAccess[] {
MemoryMappedFileAccess.Read,
MemoryMappedFileAccess.ReadWrite,
MemoryMappedFileAccess.CopyOnWrite,
MemoryMappedFileAccess.ReadExecute,
MemoryMappedFileAccess.ReadWriteExecute,
};
foreach (MemoryMappedFileAccess access in accessList)
{
VerifyCreateNew("Loc431_" + access, "CN_mapname431_" + access, 1000, access, MemoryMappedFileOptions.None, HandleInheritability.None);
}
// invalid enum value
accessList = new MemoryMappedFileAccess[] {
(MemoryMappedFileAccess)(-1),
(MemoryMappedFileAccess)(6),
};
foreach (MemoryMappedFileAccess access in accessList)
{
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc432_" + ((int)access), "CN_mapname432_" + ((int)access), 1000, access, MemoryMappedFileOptions.None, HandleInheritability.None);
}
// [] options
// Default
VerifyCreateNew("Loc440a", null, 4096 * 1000);
VerifyCreateNew("Loc440b", null, 4096 * 10000);
// None
VerifyCreateNew("Loc441", "CN_mapname441", 4096 * 10000, MemoryMappedFileAccess.ReadWrite, MemoryMappedFileOptions.None, HandleInheritability.None);
// DelayAllocatePages
VerifyCreateNew("Loc442", "CN_mapname442", 4096 * 10000, MemoryMappedFileAccess.Read, MemoryMappedFileOptions.DelayAllocatePages, HandleInheritability.None);
// invalid
VerifyCreateNewException<ArgumentOutOfRangeException>("Loc444", "CN_mapname444", 100, MemoryMappedFileAccess.ReadWrite, (MemoryMappedFileOptions)(-1), HandleInheritability.None);
/// END TEST CASES
if (iCountErrors == 0)
{
return true;
}
else
{
Console.WriteLine("Fail: iCountErrors==" + iCountErrors);
return false;
}
}
catch (Exception ex)
{
Console.WriteLine("ERR999: Unexpected exception in runTest, {0}", ex);
return false;
}
}
/// START HELPER FUNCTIONS
public void VerifyCreateNew(String strLoc, String mapName, long capacity)
{
iCountTestcases++;
try
{
ulong initAvailPageFile = GetAvailPageFile();
using (MemoryMappedFile mmf = MemoryMappedFile.CreateNew(mapName, capacity))
{
VerifyAccess(strLoc, mmf, MemoryMappedFileAccess.ReadWrite, capacity);
VerifyHandleInheritability(strLoc, mmf.SafeMemoryMappedFileHandle, HandleInheritability.None);
}
}
catch (Exception ex)
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: Unexpected exception, {1}", strLoc, ex);
}
}
public void VerifyCreateNewException<EXCTYPE>(String strLoc, String mapName, long capacity) where EXCTYPE : Exception
{
iCountTestcases++;
try
{
using (MemoryMappedFile mmf = MemoryMappedFile.CreateNew(mapName, capacity))
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: No exception thrown, expected {1}", strLoc, typeof(EXCTYPE));
}
}
catch (EXCTYPE)
{
//Console.WriteLine("{0}: Expected, {1}: {2}", strLoc, ex.GetType(), ex.Message);
}
catch (Exception ex)
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: Unexpected exception, {1}", strLoc, ex);
}
}
public void VerifyCreateNew(String strLoc, String mapName, long capacity, MemoryMappedFileAccess access)
{
iCountTestcases++;
try
{
ulong initAvailPageFile = GetAvailPageFile();
using (MemoryMappedFile mmf = MemoryMappedFile.CreateNew(mapName, capacity, access))
{
VerifyAccess(strLoc, mmf, access, capacity);
VerifyHandleInheritability(strLoc, mmf.SafeMemoryMappedFileHandle, HandleInheritability.None);
}
}
catch (Exception ex)
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: Unexpected exception, {1}", strLoc, ex);
}
}
public void VerifyCreateNewException<EXCTYPE>(String strLoc, String mapName, long capacity, MemoryMappedFileAccess access) where EXCTYPE : Exception
{
iCountTestcases++;
try
{
using (MemoryMappedFile mmf = MemoryMappedFile.CreateNew(mapName, capacity, access))
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: No exception thrown, expected {1}", strLoc, typeof(EXCTYPE));
}
}
catch (EXCTYPE)
{
//Console.WriteLine("{0}: Expected, {1}: {2}", strLoc, ex.GetType(), ex.Message);
}
catch (Exception ex)
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: Unexpected exception, {1}", strLoc, ex);
}
}
public void VerifyCreateNew(String strLoc, String mapName, long capacity, MemoryMappedFileAccess access, MemoryMappedFileOptions options, HandleInheritability inheritability)
{
iCountTestcases++;
try
{
ulong initAvailPageFile = GetAvailPageFile();
using (MemoryMappedFile mmf = MemoryMappedFile.CreateNew(mapName, capacity, access, options, inheritability))
{
VerifyAccess(strLoc, mmf, access, capacity);
VerifyHandleInheritability(strLoc, mmf.SafeMemoryMappedFileHandle, inheritability);
}
}
catch (Exception ex)
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: Unexpected exception, {1}", strLoc, ex);
}
}
public void VerifyCreateNewException<EXCTYPE>(String strLoc, String mapName, long capacity, MemoryMappedFileAccess access, MemoryMappedFileOptions options, HandleInheritability inheritability) where EXCTYPE : Exception
{
iCountTestcases++;
try
{
using (MemoryMappedFile mmf = MemoryMappedFile.CreateNew(mapName, capacity, access, options, inheritability))
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: No exception thrown, expected {1}", strLoc, typeof(EXCTYPE));
}
}
catch (EXCTYPE)
{
//Console.WriteLine("{0}: Expected, {1}: {2}", strLoc, ex.GetType(), ex.Message);
}
catch (Exception ex)
{
iCountErrors++;
Console.WriteLine("ERROR, {0}: Unexpected exception, {1}", strLoc, ex);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//testing double narrowing
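// delta1 (1.0E-18) is far below double's ulp at 1.0 (~2.2E-16), so f1 + delta1 and f1 - delta1
// round back to exactly f1 under IEEE round-to-nearest, and 3 * (1.0/3.0) rounds to exactly 1.0.
// The explicit (double) casts force the conv.r8 narrowing: any higher-precision intermediate
// (e.g. an x87 80-bit register) must be truncated to 64 bits before the comparison.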
using System;
public struct VT
{
public double f1;
public double delta1;
public int a1;
public double b1;
public double temp;
}
public class CL
{
//used for add and sub
public double f1 = 1.0;
public double delta1 = 1.0E-18;
//used for mul and div
public int a1 = 3;
public double b1 = (1.0 / 3.0);
//used as temp variable
public double temp;
}
public class ConvR8test
{
//static fields of the class
private static double s_f1 = 1.0;
private static double s_delta1 = 1.0E-18;
private static int s_a1 = 3;
private static double s_b1 = (1.0 / 3.0);
private static void disableInline(ref int x) { }
//f1 and delta1 are static fields of the class
private static double doubleadd()
{
int i = 0;
disableInline(ref i);
return s_f1 + s_delta1;
}
private static double doublesub()
{
int i = 0;
disableInline(ref i);
return s_f1 - s_delta1;
}
private static double doublemul()
{
int i = 0;
disableInline(ref i);
return s_a1 * s_b1;
}
private static double doublediv()
{
int i = 0;
disableInline(ref i);
return s_f1 / s_a1;
}
private static double doubleadd_inline()
{
return s_f1 + s_delta1;
}
private static double doublesub_inline()
{
return s_f1 - s_delta1;
}
private static double doublemul_inline()
{
return s_a1 * s_b1;
}
private static double doublediv_inline()
{
return s_f1 / s_a1;
}
public static int Main()
{
bool pass = true;
double temp;
double[] arr = new double[3];
VT vt1;
CL cl1 = new CL();
//*** add ***
Console.WriteLine();
Console.WriteLine("***add***");
//local, in-line
if (((double)(s_f1 + s_delta1)) != s_f1)
{
Console.WriteLine("((double)(f1+delta1))!=f1");
pass = false;
}
//local
temp = s_f1 + s_delta1;
if (((double)temp) != s_f1)
{
Console.WriteLine("((double)temp)!=f1, temp=f1+delta1");
pass = false;
}
//method call
if (((double)doubleadd()) != s_f1)
{
Console.WriteLine("((double)doubleadd())!=f1");
pass = false;
}
//inline method call
if (((double)doubleadd_inline()) != s_f1)
{
Console.WriteLine("((double)doubleadd_inline())!=f1");
pass = false;
}
//array element
arr[0] = s_f1;
arr[1] = s_delta1;
arr[2] = arr[0] + arr[1];
if (((double)arr[2]) != s_f1)
{
Console.WriteLine("((double)arr[2])!=f1");
pass = false;
}
//struct
vt1.f1 = 1.0;
vt1.delta1 = 1.0E-18;
vt1.temp = vt1.f1 + vt1.delta1;
if (((double)vt1.temp) != s_f1)
{
Console.WriteLine("((double)vt1.temp)!=f1");
pass = false;
}
//class
cl1.temp = cl1.f1 + cl1.delta1;
if (((double)cl1.temp) != s_f1)
{
Console.WriteLine("((double)cl1.temp)!=f1");
pass = false;
}
//*** minus ***
Console.WriteLine();
Console.WriteLine("***sub***");
//local, in-line
if (((double)(s_f1 - s_delta1)) != s_f1)
{
Console.WriteLine("((double)(f1-delta1))!=f1");
pass = false;
}
//local
temp = s_f1 - s_delta1;
if (((double)temp) != s_f1)
{
Console.WriteLine("((double)temp)!=f1, temp=f1-delta1");
pass = false;
}
//method call
if (((double)doublesub()) != s_f1)
{
Console.WriteLine("((double)doublesub())!=f1");
pass = false;
}
//inline method call
if (((double)doublesub_inline()) != s_f1)
{
Console.WriteLine("((double)doublesub_inline())!=f1");
pass = false;
}
//array element
arr[0] = s_f1;
arr[1] = s_delta1;
arr[2] = arr[0] - arr[1];
if (((double)arr[2]) != s_f1)
{
Console.WriteLine("((double)arr[2])!=f1");
pass = false;
}
//struct
vt1.f1 = 1.0;
vt1.delta1 = 1.0E-18;
vt1.temp = vt1.f1 - vt1.delta1;
if (((double)vt1.temp) != s_f1)
{
Console.WriteLine("((double)vt1.temp)!=f1");
pass = false;
}
//class
cl1.temp = cl1.f1 - cl1.delta1;
if (((double)cl1.temp) != s_f1)
{
Console.WriteLine("((double)cl1.temp)!=f1");
pass = false;
}
//*** multiply ***
Console.WriteLine();
Console.WriteLine("***mul***");
//local, in-line
if (((double)(s_a1 * s_b1)) != s_f1)
{
Console.WriteLine("((double)(a1*b1))!=f1");
pass = false;
}
//local
temp = s_a1 * s_b1;
if (((double)temp) != s_f1)
{
Console.WriteLine("((double)temp)!=f1, temp=a1*b1");
pass = false;
}
//method call
if (((double)doublemul()) != s_f1)
{
Console.WriteLine("((double)doublemul())!=f1");
pass = false;
}
//inline method call
if (((double)doublemul_inline()) != s_f1)
{
Console.WriteLine("((double)doublemul_inline())!=f1");
pass = false;
}
//array element
arr[0] = s_a1;
arr[1] = s_b1;
arr[2] = arr[0] * arr[1];
if (((double)arr[2]) != s_f1)
{
Console.WriteLine("((double)arr[2])!=f1");
pass = false;
}
//struct
vt1.a1 = 3;
vt1.b1 = 1.0 / 3.0;
vt1.temp = vt1.a1 * vt1.b1;
if (((double)vt1.temp) != s_f1)
{
Console.WriteLine("((double)vt1.temp)!=f1");
pass = false;
}
//class
cl1.temp = cl1.a1 * cl1.b1;
if (((double)cl1.temp) != s_f1)
{
Console.WriteLine("((double)cl1.temp)!=f1");
pass = false;
}
//*** divide ***
Console.WriteLine();
Console.WriteLine("***div***");
//local, in-line
if (((double)(s_f1 / s_a1)) != s_b1)
{
Console.WriteLine("((double)(f1/a1))!=b1");
pass = false;
}
//local
temp = s_f1 / s_a1;
if (((double)temp) != s_b1)
{
Console.WriteLine("((double)temp)!=f1, temp=f1/a1");
pass = false;
}
//method call
if (((double)doublediv()) != s_b1)
{
Console.WriteLine("((double)doubledivl())!=b1");
pass = false;
}
//inline method call
if (((double)doublediv_inline()) != s_b1)
{
Console.WriteLine("((double)doublediv_inline())!=b1");
pass = false;
}
//array element
arr[0] = s_f1;
arr[1] = s_a1;
arr[2] = arr[0] / arr[1];
if (((double)arr[2]) != s_b1)
{
Console.WriteLine("((double)arr[2])!=b1");
pass = false;
}
//struct
vt1.f1 = 1.0;
vt1.a1 = 3;
vt1.temp = vt1.f1 / vt1.a1;
if (((double)vt1.temp) != s_b1)
{
Console.WriteLine("((double)vt1.temp)!=b1");
pass = false;
}
//class
cl1.temp = cl1.f1 / cl1.a1;
if (((double)cl1.temp) != s_b1)
{
Console.WriteLine("((double)cl1.temp)!=b1");
pass = false;
}
Console.WriteLine();
if (pass)
{
Console.WriteLine("SUCCESS");
return 100;
}
else
{
Console.WriteLine("FAILURE: double not truncated properly");
return 1;
}
}
}
| |
#region License
//
// CompositeMap.cs July 2007
//
// Copyright (C) 2007, Niall Gallagher <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
#endregion
#region Using directives
using SimpleFramework.Xml.Strategy;
using SimpleFramework.Xml.Stream;
using System.Collections.Generic;
using System;
#endregion
namespace SimpleFramework.Xml.Core {
/// <summary>
/// The <c>CompositeMap</c> is used to serialize and deserialize
/// maps to and from a source XML document. The structure of the map in
/// the XML format is determined by the annotation. Keys can be either
/// attributes or elements, and values can be inline. This can perform
/// serialization and deserialization of the key and value objects
/// whether the object types are primitive or composite.
/// <code>
/// <map>
/// <entry key='1'>
/// <value>one</value>
/// </entry>
/// <entry key='2'>
/// <value>two</value>
/// </entry>
/// </map>
/// </code>
/// For the above XML element map the element <c>entry</c> is
/// used to wrap the key and value such that they can be grouped. This
/// element does not represent any real object. The names of each of
/// the XML elements serialized and deserialized can be configured.
/// </summary>
/// <seealso>
/// SimpleFramework.Xml.Core.Entry
/// </seealso>
class CompositeMap : Converter {
/// <summary>
/// The factory used to create suitable map object instances.
/// </summary>
private readonly MapFactory factory;
/// <summary>
/// This is the type that the value objects are instances of.
/// </summary>
private readonly Converter value;
/// <summary>
/// This is the name of the entry wrapping the key and value.
/// </summary>
private readonly Converter key;
/// <summary>
/// This is the style used to style the names used for the XML.
/// </summary>
private readonly Style style;
/// <summary>
/// The entry object contains the details on how to write the map.
/// </summary>
private readonly Entry entry;
/// <summary>
/// Constructor for the <c>CompositeMap</c> object. This will
/// create a converter that is capable of writing map objects to
/// and from XML. The resulting XML is configured by an annotation
/// such that keys can be attributes and values can be inline.
/// </summary>
/// <param name="context">
/// this is the root context for the serialization
/// </param>
/// <param name="entry">
/// this provides configuration for the resulting XML
/// </param>
/// <param name="type">
/// this is the map type that is to be converted
/// </param>
public CompositeMap(Context context, Entry entry, Type type) {
this.factory = new MapFactory(context, type);
this.value = entry.GetValue(context);
this.key = entry.GetKey(context);
this.style = context.getStyle();
this.entry = entry;
}
/// <summary>
/// This <c>read</c> method will read the XML element map from
/// the provided node and deserialize its children as entry types.
/// Each entry type must contain a key and value so that the entry
/// can be inserted in to the map as a pair. If either the key or
/// value is composite it is read as a root object, which means its
/// <c>Root</c> annotation must be present and the name of the
/// object element must match that root element name.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be deserialized
/// </param>
/// <returns>
/// this returns the item to attach to the object contact
/// </returns>
public Object Read(InputNode node) {
Instance type = factory.GetInstance(node);
Object map = type.Instance;
if(!type.isReference()) {
return Populate(node, map);
}
return map;
}
/// <summary>
/// This <c>read</c> method will read the XML element map from
/// the provided node and deserialize its children as entry types.
/// Each entry type must contain a key and value so that the entry
/// can be inserted in to the map as a pair. If either the key or
/// value is composite it is read as a root object, which means its
/// <c>Root</c> annotation must be present and the name of the
/// object element must match that root element name.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be deserialized
/// </param>
/// <param name="result">
/// this is the map object that is to be populated
/// </param>
/// <returns>
/// this returns the item to attach to the object contact
/// </returns>
public Object Read(InputNode node, Object result) {
Instance type = factory.GetInstance(node);
if(type.isReference()) {
return type.Instance;
}
type.setInstance(result);
if(result != null) {
return Populate(node, result);
}
return result;
}
/// <summary>
/// This <c>populate</c> method will read the XML element map
/// from the provided node and deserialize its children as entry types.
/// Each entry type must contain a key and value so that the entry
/// can be inserted in to the map as a pair. If either the key or
/// value is composite it is read as a root object, which means its
/// <c>Root</c> annotation must be present and the name of the
/// object element must match that root element name.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be deserialized
/// </param>
/// <param name="result">
/// this is the map object that is to be populated
/// </param>
/// <returns>
/// this returns the item to attach to the object contact
/// </returns>
public Object Populate(InputNode node, Object result) {
Dictionary map = (Map) result;
while(true) {
InputNode next = node.getNext();
if(next == null) {
return map;
}
Object index = key.Read(next);
Object item = value.Read(next);
map.put(index, item);
}
}
/// <summary>
/// This <c>validate</c> method will validate the XML element
/// map from the provided node and validate its children as entry
/// types. Each entry type must contain a key and value so that the
/// entry can be inserted in to the map as a pair. If either the key
/// or value is composite it is read as a root object, which means its
/// <c>Root</c> annotation must be present and the name of the
/// object element must match that root element name.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be validate
/// </param>
/// <returns>
/// true if the element matches the XML schema class given
/// </returns>
public bool Validate(InputNode node) {
Instance value = factory.GetInstance(node);
if(!value.isReference()) {
Object result = value.setInstance(null);
Class type = value.getType();
return Validate(node, type);
}
return true;
}
/// <summary>
/// This <c>validate</c> method will validate the XML element
/// map from the provided node and validate its children as entry
/// types. Each entry type must contain a key and value so that the
/// entry can be inserted in to the map as a pair. If either the key
/// or value is composite it is read as a root object, which means its
/// <c>Root</c> annotation must be present and the name of the
/// object element must match that root element name.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be validate
/// </param>
/// <param name="type">
/// this is the type to validate the input node against
/// </param>
/// <returns>
/// true if the element matches the XML schema class given
/// </returns>
public bool Validate(InputNode node, Class type) {
while(true) {
InputNode next = node.getNext();
if(next == null) {
return true;
}
if(!key.Validate(next)) {
return false;
}
if(!value.Validate(next)) {
return false;
}
}
}
/// <summary>
/// This <c>write</c> method will write the key value pairs
/// within the provided map to the specified XML node. Each entry
/// written must contain a key and value so that the entry can be
/// deserialized in to the map as a pair. If the
/// key or value object is composite it is read as a root object
/// so its <c>Root</c> annotation must be present.
/// </summary>
/// <param name="node">
/// this is the node the map is to be written to
/// </param>
/// <param name="source">
/// this is the source map that is to be written
/// </param>
public void Write(OutputNode node, Object source) {
Dictionary map = (Map) source;
foreach(Object index in map.keySet()) {
String root = entry.Entry;
String name = style.GetElement(root);
OutputNode next = node.getChild(name);
Object item = map.get(index);
key.Write(next, index);
value.Write(next, item);
}
}
}
}
| |
using System;
using OpenADK.Library;
using OpenADK.Library.Infra;
using NUnit.Framework;
using Library.UnitTesting.Framework;
namespace Library.NUnit.Core.Library
{
[TestFixture]
public class SIFPrimitivesTests : InMemoryProtocolTest, ISubscriber
{
[TearDown]
public override void TearDown()
{
base.TearDown();
// Clean up any properties that have been set and reset the ADK version
Adk.SifVersion = SifVersion.LATEST;
Agent.Properties.Clear();
}
[Test]
public void testRegisterSIF20()
{
Adk.SifVersion = (SifVersion.SIF20);
String iconURL = "http://acme.foo.bar/ico";
AgentProperties props = Agent.Properties;
props.AgentIconUrl = iconURL;
Agent.Name = "acmeAgent";
props.AgentVendor = "acmeVendor";
props.AgentVersion = "2.6.5.8";
props.ApplicationName = "acmeApp";
props.ApplicationVendor = "acme<>AppVendor";
props.ApplicationVersion = "10.2";
Zone.Connect( ProvisioningFlags.Register );
InMemoryProtocolHandler handler = (InMemoryProtocolHandler) Zone.ProtocolHandler;
SIF_Register sr = (SIF_Register) handler.readMsg();
Assert.AreEqual( Agent.Id, sr.SourceId, "SourceID" );
Assert.AreEqual( "acmeAgent", sr.SIF_Name, "Name" );
Assert.AreEqual( "acmeVendor", sr.SIF_NodeVendor, "Agent Vendor" );
Assert.AreEqual( "2.6.5.8", sr.SIF_NodeVersion, "Agent Version" );
SIF_Application appInfo = sr.SIF_Application;
Assert.IsNotNull( appInfo );
Assert.AreEqual( "acmeApp", appInfo.SIF_Product, "App Name" );
Assert.AreEqual( "acme<>AppVendor", appInfo.SIF_Vendor, "App Vendor" );
Assert.AreEqual( "10.2", appInfo.SIF_Version, "App Version" );
Assert.AreEqual( iconURL, sr.SIF_Icon, "Icon" );
}
[Test]
public void testRegisterOverrideZISVersion()
{
Adk.SifVersion = (SifVersion.SIF20);
AgentProperties props = Agent.Properties;
props.OverrideSifVersions = "1.1, 2.5";
Zone.Connect(ProvisioningFlags.Register);
InMemoryProtocolHandler handler = (InMemoryProtocolHandler)Zone.ProtocolHandler;
SIF_Register sr = (SIF_Register)handler.readMsg();
SIF_Version[] versions = sr.GetSIF_Versions();
Assert.IsNotNull( versions );
Assert.AreEqual( 2, versions.Length );
Assert.AreEqual( "1.1", versions[0].Value );
Assert.AreEqual("2.5", versions[1].Value);
}
[Test]
public void testRegisterSIF15r1()
{
Adk.SifVersion = (SifVersion.SIF15r1);
String iconURL = "http://acme.foo.bar/ico";
AgentProperties props = Agent.Properties;
props.AgentIconUrl = iconURL;
Agent.Name = "acmeAgent";
props.AgentVendor = "acmeVendor";
props.AgentVersion = "2.6.5.8";
props.ApplicationName = "acmeApp";
props.ApplicationVendor = "acme<>AppVendor";
props.ApplicationVersion = "10.2";
Zone.Connect( ProvisioningFlags.Register );
InMemoryProtocolHandler handler = (InMemoryProtocolHandler) Zone.ProtocolHandler;
SIF_Register sr = (SIF_Register) handler.readMsg();
Assert.AreEqual( Agent.Id, sr.SourceId, "SourceID" );
Assert.AreEqual( "acmeAgent", sr.SIF_Name, "Name" );
Assert.IsNull( sr.SIF_NodeVendor, "Agent Vendor" );
Assert.IsNull( sr.SIF_NodeVersion, "Agent Version" );
SIF_Application appInfo = sr.SIF_Application;
Assert.IsNull( appInfo );
Assert.IsNull( sr.SIF_Icon, "Icon" );
// Assert the versions in the message. If the ADK is initialized to
// SIF 1.5r1, it should not be sending any versions that start with a
// "2"
SifVersion messageVersion = sr.SifVersion;
Assert.AreEqual( SifVersion.SIF15r1, messageVersion, "Should be version 1.5r1" );
foreach ( SIF_Version version in sr.GetSIF_Versions() )
{
String versionString = version.TextValue;
Assert.IsTrue( versionString.StartsWith( "1" ), "Should start with 1 but was " + versionString );
}
}
/**
*
*/
[Test]
public void testSIFPingDifferentVersions()
{
Adk.SifVersion = (SifVersion.LATEST);
Zone.Connect( ProvisioningFlags.None );
InMemoryProtocolHandler handler = (InMemoryProtocolHandler) Zone.ProtocolHandler;
handler.clear();
Zone.SifPing();
SIF_SystemControl ssc = (SIF_SystemControl) handler.readMsg();
Assert.AreEqual( SifVersion.LATEST, ssc.SifVersion, "SifVersion" );
Assert.AreEqual( SifVersion.LATEST.Xmlns, ssc.GetXmlns(), "SifVersion->Xmlns" );
foreach ( SifVersion version in Adk.SupportedSIFVersions )
{
// This may seem strange, but the ADK sometimes has a SIF version in the list of
// supported versions that is not fully supported yet (e.g. preparing the ADK for
// the next version. Because of that, only test SIF_Ping with versions if they
// are equal to or less than SifVersion.LATEST
if ( version.CompareTo( SifVersion.LATEST ) <= 0 )
{
testSIFPingWithZISVersion( handler, version );
}
}
}
/**
*
*/
[Test]
public void testSynchronousGetZoneStatus()
{
Adk.SifVersion = (SifVersion.LATEST);
Zone.Connect( ProvisioningFlags.None );
InMemoryProtocolHandler handler = (InMemoryProtocolHandler) Zone.ProtocolHandler;
Zone.Properties.UseZoneStatusSystemControl = true;
Zone.Properties.ZisVersion = SifVersion.SIF15r1.ToString();
handler.clear();
SIF_ZoneStatus szs = Zone.GetZoneStatus();
SIF_SystemControl ssc = (SIF_SystemControl) handler.readMsg();
Assert.AreEqual( SifVersion.SIF15r1, ssc.SifVersion, "SifVersion" );
Assert.AreEqual( SifVersion.SIF15r1.Xmlns, ssc.GetXmlns(), "SifVersion->Xmlns" );
SifElement element = ssc.SIF_SystemControlData.GetChildList()[0];
Assert.IsNotNull( element, "SIF_SystemControlData\\Child" );
Assert.IsTrue( element is SIF_GetZoneStatus, "is instanceof SIF_GetZoneStatus" );
}
/**
*
*/
[Test]
public void testAsynchronousGetZoneStatus()
{
Adk.SifVersion = (SifVersion.LATEST);
Zone.Connect( ProvisioningFlags.None );
InMemoryProtocolHandler handler = (InMemoryProtocolHandler) Zone.ProtocolHandler;
Zone.Properties.UseZoneStatusSystemControl = false;
Zone.Properties.ZisVersion = SifVersion.SIF15r1.ToString();
handler.clear();
try
{
// We expect a SIF XML Error exception in this case because
// our handler doesn't return a valid response back to a pull message
Zone.GetZoneStatus();
}
catch ( SifException sifEx )
{
Assert.AreEqual( SifErrorCategoryCode.Xml, sifEx.ErrorCategory );
}
SIF_Request sr = (SIF_Request) handler.readMsg();
Assert.AreEqual( SifVersion.SIF15r1, sr.SifVersion, "SifVersion" );
Assert.AreEqual( SifVersion.SIF15r1.Xmlns, sr.GetXmlns(), "SifVersion->Xmlns" );
}
private void testSIFPingWithZISVersion( InMemoryProtocolHandler handler, SifVersion testVersion )
{
SIF_SystemControl ssc;
Zone.Properties.ZisVersion = testVersion.ToString();
Zone.SifPing();
ssc = (SIF_SystemControl) handler.readMsg();
Assert.AreEqual( testVersion, ssc.SifVersion, "SifVersion" );
Assert.AreEqual( testVersion.Xmlns, ssc.GetXmlns(), "SifVersion->Xmlns" );
}
/**
* Tests registering with the ADK version set to 2.0 or greater, but the
* AgentProperties.getZISVersion() property set to 1.5r1. This should result
* in the SIF_Register message being sent in 1.5r1
* @throws Exception
*/
[Test]
public void testSIFRegisterZISVersion15r1()
{
Adk.SifVersion = (SifVersion.LATEST);
String iconURL = "http://acme.foo.bar/ico";
AgentProperties props = Agent.Properties;
// Set the ZIS Version to 1.5r1
props.ZisVersion = SifVersion.SIF15r1.ToString();
props.AgentIconUrl = iconURL;
Agent.Name = "acmeAgent";
props.AgentVendor = "acmeVendor";
props.AgentVersion = "2.6.5.8";
props.ApplicationName = "acmeApp";
props.ApplicationVendor = "acme<>AppVendor";
props.ApplicationVersion = "10.2";
Zone.Connect( ProvisioningFlags.Register );
InMemoryProtocolHandler handler = (InMemoryProtocolHandler) Zone.ProtocolHandler;
SIF_Register sr = (SIF_Register) handler.readMsg();
Assert.AreEqual( SifVersion.SIF15r1, sr.SifVersion, "SifVersion" );
Assert.AreEqual( SifVersion.SIF15r1.Xmlns, sr.GetXmlns(), "SifVersion->Xmlns" );
Assert.AreEqual( Agent.Id, sr.SourceId, "SourceID" );
Assert.AreEqual( "acmeAgent", sr.SIF_Name, "Name" );
Assert.IsNull( sr.SIF_NodeVendor, "Agent Vendor" );
Assert.IsNull( sr.SIF_NodeVersion, "Agent Version" );
SIF_Application appInfo = sr.SIF_Application;
Assert.IsNull( appInfo );
Assert.IsNull( sr.SIF_Icon, "Icon" );
// Assert the versions in the message. If the ADK is initialized to
// SIF 1.5r1, it should not be sending any versions that start with a
// "2"
SifVersion messageVersion = sr.SifVersion;
Assert.AreEqual( SifVersion.SIF15r1, messageVersion, "Should be version 1.5r1" );
foreach ( SIF_Version version in sr.GetSIF_Versions() )
{
String versionString = version.TextValue;
Assert.IsTrue( versionString.StartsWith( "1" ), "Should start with 1 but was " + versionString );
}
}
/**
*
*/
[Test]
public void testProvisioningSIF20()
{
String[] expectedMessages =
new String[] {"SIF_Register", "SIF_SystemControl", "SIF_SystemControl", "SIF_Provision"};
Adk.SifVersion = (SifVersion.LATEST);
Zone.SetSubscriber( this, InfraDTD.SIF_AGENTACL, null );
assertMessagesInVersion( SifVersion.LATEST, expectedMessages );
}
/**
*
*/
[Test]
public void testProvisioningSIF15r1()
{
String[] expectedMessages = new String[] {"SIF_Register", "SIF_SystemControl", "SIF_Subscribe"};
Adk.SifVersion = (SifVersion.SIF15r1);
Zone.SetSubscriber(this, InfraDTD.SIF_AGENTACL, null);
assertMessagesInVersion( SifVersion.SIF15r1, expectedMessages );
}
/**
*
*/
[Test]
public void testProvisioningZIS15r1()
{
String[] expectedMessages = new String[] {"SIF_Register", "SIF_SystemControl", "SIF_Subscribe"};
Adk.SifVersion = (SifVersion.LATEST);
Zone.Properties.ZisVersion = SifVersion.SIF15r1.ToString();
Zone.SetSubscriber(this, InfraDTD.SIF_AGENTACL, null);
assertMessagesInVersion( SifVersion.SIF15r1, expectedMessages );
}
private void assertMessagesInVersion( SifVersion version, String[] expectedMessages )
{
Zone.Connect( ProvisioningFlags.Register );
InMemoryProtocolHandler handler = (InMemoryProtocolHandler) Zone.ProtocolHandler;
for ( int a = 0; a < expectedMessages.Length; a++ )
{
SifMessagePayload smp = (SifMessagePayload) handler.readMsg();
Assert.AreEqual( expectedMessages[a], smp.Tag, "Should be a " + expectedMessages[a] );
Assert.AreEqual( version, smp.SifVersion, "Version should be " + version );
}
Assert.IsNull( handler.readMsg(), " Should have no more messages " );
}
#region ISubscriber Members
/// <summary> Respond to a SIF_Event received from a zone.</summary>
/// <param name="evnt">The event data</param>
/// <param name="zone">The zone from which this event originated</param>
/// <param name="info">Information about the SIF_Event message</param>
public void OnEvent( Event evnt, IZone zone, IMessageInfo info )
{
throw new NotImplementedException();
}
#endregion
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="MSMQ.cs">(c) http://www.codeplex.com/MSBuildExtensionPack. This source is subject to the Microsoft Permissive License. See http://www.microsoft.com/resources/sharedsource/licensingbasics/sharedsourcelicenses.mspx. All other rights reserved.</copyright>
//-----------------------------------------------------------------------
namespace MSBuild.ExtensionPack.Communication
{
using System;
using System.Globalization;
using System.Messaging;
using Microsoft.Build.Framework;
/// <summary>
/// <b>Valid TaskActions are:</b>
/// <para><i>Create</i> (<b>Required: </b> Path <b>Optional: Label, Transactional, Authenticated, MaximumQueueSize, MaximumJournalSize, UseJournalQueue, Force, Privacy</b> )</para>
/// <para><i>CheckExists</i> (<b>Required: </b> Path <b>Output: Exists</b> )</para>
/// <para><i>Delete</i> (<b>Required: </b> Path <b>Optional: </b> )</para>
/// <para><i>Send</i> (<b>Required: </b> Path <b>Optional: Message, Label</b> )</para>
/// <para><i>SetPermissions</i> (<b>Required: </b> Path <b>Optional: Allow, Deny, Revoke, Set</b> )</para>
/// <para><b>Remote Execution Support:</b> No</para>
/// </summary>
/// <example>
/// <code lang="xml"><![CDATA[
/// <Project ToolsVersion="3.5" DefaultTargets="Default" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
/// <PropertyGroup>
/// <TPath>$(MSBuildProjectDirectory)\..\MSBuild.ExtensionPack.tasks</TPath>
/// <TPath Condition="Exists('$(MSBuildProjectDirectory)\..\..\Common\MSBuild.ExtensionPack.tasks')">$(MSBuildProjectDirectory)\..\..\Common\MSBuild.ExtensionPack.tasks</TPath>
/// </PropertyGroup>
/// <Import Project="$(TPath)"/>
/// <Target Name="Default">
/// <ItemGroup>
/// <Allow Include="TFS">
/// <Permissions>DeleteMessage,ReceiveMessage</Permissions>
/// </Allow>
/// <Deny Include="TFS">
/// <Permissions>GetQueueProperties</Permissions>
/// </Deny>
/// </ItemGroup>
/// <!-- Create queue -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="Create" Path=".\private$\3" Label="Test Queue" Force="true"/>
/// <!-- Check if the queue exists -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="CheckExists" Path=".\private$\3">
/// <Output TaskParameter="Exists" PropertyName="DoesExist"/>
/// </MSBuild.ExtensionPack.Communication.MSMQ>
/// <Message Text="Exists: $(DoesExist)"/>
/// <!-- Delete the queue -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="Delete" Path=".\private$\3"/>
/// <!-- Check if the queue exists -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="CheckExists" Path=".\private$\3">
/// <Output TaskParameter="Exists" PropertyName="DoesExist"/>
/// </MSBuild.ExtensionPack.Communication.MSMQ>
/// <Message Text="Exists: $(DoesExist)"/>
/// <!-- Delete the queue again to see that no error is thrown -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="Delete" Path=".\private$\3"/>
/// <!-- Create queue -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="Create" Path=".\private$\3" Label="Test Queue" Force="true" Transactional="false" Authenticated="" MaximumQueueSize="220"/>
/// <!-- Send Message -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="Send" Path=".\private$\3" Message="Mike" Label="Hi2"/>
/// <!-- Send Message -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="Send" Path=".\private$\3" Message="" Label=""/>
/// <!-- Set permissions -->
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="SetPermissions" Path=".\private$\3" Allow="@(Allow)" Deny="@(Deny)"/>
/// </Target>
/// </Project>
/// ]]></code>
/// </example>
[HelpUrl("http://www.msbuildextensionpack.com/help/3.5.12.0/html/9bf7c511-7410-c9e9-e1bc-570d3dfdcc0e.htm")]
public class MSMQ : BaseTask
{
private const string CreateTaskAction = "Create";
private const string DeleteTaskAction = "Delete";
private const string CheckExistsTaskAction = "CheckExists";
private const string SendTaskAction = "Send";
private const string SetPermissionsTaskAction = "SetPermissions";
private System.Messaging.EncryptionRequired privacy = System.Messaging.EncryptionRequired.Optional;
/// <summary>
/// Sets the path of the queue. Required.
/// </summary>
[Required]
[TaskAction(CreateTaskAction, true)]
[TaskAction(DeleteTaskAction, true)]
[TaskAction(CheckExistsTaskAction, true)]
[TaskAction(SendTaskAction, true)]
[TaskAction(SetPermissionsTaskAction, true)]
public string Path { get; set; }
/// <summary>
/// Sets the Label of the queue
/// </summary>
[TaskAction(CreateTaskAction, false)]
[TaskAction(SendTaskAction, false)]
public string Label { get; set; }
/// <summary>
/// Sets the Message to send to the queue
/// </summary>
[TaskAction(SendTaskAction, true)]
public string Message { get; set; }
/// <summary>
/// An access-allowed entry that causes the new rights to be added to any existing rights the trustee has. Permission metadata supports: DeleteMessage, PeekMessage, WriteMessage, DeleteJournalMessage, SetQueueProperties, GetQueueProperties, DeleteQueue, GetQueuePermissions, ChangeQueuePermissions, TakeQueueOwnership, ReceiveMessage, ReceiveJournalMessage, GenericRead, GenericWrite, FullControl
/// </summary>
[TaskAction(SetPermissionsTaskAction, false)]
public ITaskItem[] Allow { get; set; }
/// <summary>
/// An access-denied entry that denies the specified rights in addition to any currently denied rights of the trustee. Permission metadata supports: DeleteMessage, PeekMessage, WriteMessage, DeleteJournalMessage, SetQueueProperties, GetQueueProperties, DeleteQueue, GetQueuePermissions, ChangeQueuePermissions, TakeQueueOwnership, ReceiveMessage, ReceiveJournalMessage, GenericRead, GenericWrite, FullControl
/// </summary>
[TaskAction(SetPermissionsTaskAction, false)]
public ITaskItem[] Deny { get; set; }
/// <summary>
/// An entry that removes all existing allowed or denied rights for the specified trustee. Permission metadata supports: DeleteMessage, PeekMessage, WriteMessage, DeleteJournalMessage, SetQueueProperties, GetQueueProperties, DeleteQueue, GetQueuePermissions, ChangeQueuePermissions, TakeQueueOwnership, ReceiveMessage, ReceiveJournalMessage, GenericRead, GenericWrite, FullControl
/// </summary>
[TaskAction(SetPermissionsTaskAction, false)]
public ITaskItem[] Revoke { get; set; }
/// <summary>
/// An access-allowed entry that is similar to Allow, except that the new entry allows only the specified rights. Using it discards any existing rights, including all existing access-denied entries for the trustee. Permission metadata supports: DeleteMessage, PeekMessage, WriteMessage, DeleteJournalMessage, SetQueueProperties, GetQueueProperties, DeleteQueue, GetQueuePermissions, ChangeQueuePermissions, TakeQueueOwnership, ReceiveMessage, ReceiveJournalMessage, GenericRead, GenericWrite, FullControl
/// </summary>
[TaskAction(SetPermissionsTaskAction, false)]
public ITaskItem[] Set { get; set; }
/// <summary>
/// Set true to create a transactional queue; false to create a non-transactional queue. Default is false.
/// </summary>
[TaskAction(CreateTaskAction, false)]
public bool Transactional { get; set; }
/// <summary>
/// Set to true to create an Authenticated queue. Default is false
/// </summary>
[TaskAction(CreateTaskAction, false)]
public bool Authenticated { get; set; }
/// <summary>
/// Set to true to recreate a queue if it already exists
/// </summary>
[TaskAction(CreateTaskAction, false)]
public bool Force { get; set; }
/// <summary>
/// Sets the maximum queue size in kb.
/// </summary>
[TaskAction(CreateTaskAction, false)]
public int MaximumQueueSize { get; set; }
/// <summary>
/// Sets the maximum journal size in kb.
/// </summary>
[TaskAction(CreateTaskAction, false)]
public int MaximumJournalSize { get; set; }
/// <summary>
/// Set to true to use the journal queue
/// </summary>
[TaskAction(CreateTaskAction, false)]
public bool UseJournalQueue { get; set; }
/// <summary>
/// You can specify whether the queue accepts private (encrypted) messages, non-private (non-encrypted) messages, or both. Supports Optional (default), None, Body.
/// </summary>
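/// <example>
/// A minimal illustrative usage in an MSBuild target (the queue path is hypothetical):
/// <code lang="xml"><![CDATA[
/// <MSBuild.ExtensionPack.Communication.MSMQ TaskAction="Create" Path=".\private$\3" Privacy="Body"/>
/// ]]></code>
/// </example>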
public string Privacy
{
get { return this.privacy.ToString(); }
set { this.privacy = (System.Messaging.EncryptionRequired)Enum.Parse(typeof(System.Messaging.EncryptionRequired), value); }
}
/// <summary>
/// Gets whether the queue exists
/// </summary>
[Output]
[TaskAction(CheckExistsTaskAction, false)]
public bool Exists { get; set; }
[DropdownValue(CreateTaskAction)]
[DropdownValue(DeleteTaskAction)]
[DropdownValue(CheckExistsTaskAction)]
[DropdownValue(SendTaskAction)]
[DropdownValue(SetPermissionsTaskAction)]
public override string TaskAction
{
get { return base.TaskAction; }
set { base.TaskAction = value; }
}
/// <summary>
/// Performs the action of this task.
/// </summary>
protected override void InternalExecute()
{
switch (this.TaskAction)
{
case CreateTaskAction:
this.Create();
break;
case CheckExistsTaskAction:
this.CheckExists();
break;
case DeleteTaskAction:
this.Delete();
break;
case SendTaskAction:
this.Send();
break;
case SetPermissionsTaskAction:
this.SetPermissions();
break;
default:
this.Log.LogError(string.Format(CultureInfo.CurrentCulture, "Invalid TaskAction passed: {0}", this.TaskAction));
return;
}
}
private void SetPermissions()
{
if (System.Messaging.MessageQueue.Exists(this.Path))
{
this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Setting permissions on queue: {0}", this.Path));
using (System.Messaging.MessageQueue queue = new System.Messaging.MessageQueue(this.Path))
{
if (this.Allow != null)
{
foreach (ITaskItem i in this.Allow)
{
MessageQueueAccessRights permission = (MessageQueueAccessRights)Enum.Parse(typeof(MessageQueueAccessRights), i.GetMetadata("Permissions"), true);
this.LogTaskMessage(MessageImportance.Low, string.Format(CultureInfo.CurrentCulture, "Allow permission for user {0} - {1}", i.ItemSpec, i.GetMetadata("Permissions")));
queue.SetPermissions(i.ItemSpec, permission, AccessControlEntryType.Allow);
}
}
if (this.Deny != null)
{
foreach (ITaskItem i in this.Deny)
{
MessageQueueAccessRights permission = (MessageQueueAccessRights)Enum.Parse(typeof(MessageQueueAccessRights), i.GetMetadata("Permissions"), true);
this.LogTaskMessage(MessageImportance.Low, string.Format(CultureInfo.CurrentCulture, "Deny permission for user {0} - {1}", i.ItemSpec, i.GetMetadata("Permissions")));
queue.SetPermissions(i.ItemSpec, permission, AccessControlEntryType.Deny);
}
}
if (this.Set != null)
{
foreach (ITaskItem i in this.Set)
{
MessageQueueAccessRights permission = (MessageQueueAccessRights)Enum.Parse(typeof(MessageQueueAccessRights), i.GetMetadata("Permissions"), true);
this.LogTaskMessage(MessageImportance.Low, string.Format(CultureInfo.CurrentCulture, "Set permission for user {0} - {1}", i.ItemSpec, i.GetMetadata("Permissions")));
queue.SetPermissions(i.ItemSpec, permission, AccessControlEntryType.Set);
}
}
if (this.Revoke != null)
{
foreach (ITaskItem i in this.Revoke)
{
MessageQueueAccessRights permission = (MessageQueueAccessRights)Enum.Parse(typeof(MessageQueueAccessRights), i.GetMetadata("Permissions"), true);
this.LogTaskMessage(MessageImportance.Low, string.Format(CultureInfo.CurrentCulture, "Revoke permission for user {0} - {1}", i.ItemSpec, i.GetMetadata("Permissions")));
queue.SetPermissions(i.ItemSpec, permission, AccessControlEntryType.Revoke);
}
}
}
}
else
{
this.Log.LogError(string.Format(CultureInfo.CurrentCulture, "Queue not found: {0}", this.Path));
return;
}
}
private void Create()
{
this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Creating queue: {0}", this.Path));
if (System.Messaging.MessageQueue.Exists(this.Path))
{
if (this.Force)
{
this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Deleting existing queue: {0}", this.Path));
System.Messaging.MessageQueue.Delete(this.Path);
}
else
{
this.Log.LogError(string.Format(CultureInfo.CurrentCulture, "Queue already exists. Use Force=\"true\" to delete the existing queue: {0}", this.Path));
return;
}
}
using (System.Messaging.MessageQueue q = System.Messaging.MessageQueue.Create(this.Path, this.Transactional))
{
if (!string.IsNullOrEmpty(this.Label))
{
q.Label = this.Label;
}
if (this.Authenticated)
{
q.Authenticate = true;
}
if (this.MaximumQueueSize > 0)
{
q.MaximumQueueSize = this.MaximumQueueSize;
}
if (this.UseJournalQueue)
{
q.UseJournalQueue = true;
if (this.MaximumJournalSize > 0)
{
q.MaximumJournalSize = this.MaximumJournalSize;
}
}
q.EncryptionRequired = this.privacy;
}
}
private void CheckExists()
{
this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Checking whether queue exists: {0}", this.Path));
this.Exists = System.Messaging.MessageQueue.Exists(this.Path);
}
private void Send()
{
if (System.Messaging.MessageQueue.Exists(this.Path))
{
if (string.IsNullOrEmpty(this.Label))
{
this.Label = string.Empty;
}
this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Sending message to queue: [{0}] - {1}", this.Path, this.Message));
// Connect to a queue on the local computer.
using (System.Messaging.MessageQueue queue = new System.Messaging.MessageQueue(this.Path))
{
// Send a message to the queue.
if (this.Transactional)
{
// Create a transaction.
using (MessageQueueTransaction myTransaction = new MessageQueueTransaction())
{
// Begin the transaction.
myTransaction.Begin();
// Send the message.
queue.Send(this.Message, this.Label, myTransaction);
// Commit the transaction.
myTransaction.Commit();
}
}
else
{
queue.Send(this.Message, this.Label);
}
}
}
else
{
this.Log.LogError(string.Format(CultureInfo.CurrentCulture, "Queue not found: {0}", this.Path));
return;
}
}
private void Delete()
{
if (System.Messaging.MessageQueue.Exists(this.Path))
{
this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Deleting queue: {0}", this.Path));
System.Messaging.MessageQueue.Delete(this.Path);
}
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Data;
using Mono.Data.SqliteClient;
namespace OpenSim.Data.SQLite
{
/// <summary>
/// A base class for methods needed by all SQLite database classes
/// </summary>
public class SQLiteUtil
{
/***********************************************************************
*
* Database Definition Helper Functions
*
* This should be db agnostic as we define them in ADO.NET terms
*
**********************************************************************/
/// <summary>
/// Adds a column of the given name and type to the supplied DataTable.
/// </summary>
/// <param name="dt">the table to add the column to</param>
/// <param name="name">the column name</param>
/// <param name="type">the column data type</param>
public static void createCol(DataTable dt, string name, Type type)
{
DataColumn col = new DataColumn(name, type);
dt.Columns.Add(col);
}
/***********************************************************************
*
* SQL Statement Creation Functions
*
* These functions create SQL statements for update, insert, and create.
* They can probably be factored later to have a db independent
* portion and a db specific portion
*
**********************************************************************/
/// <summary>
/// Create an insert command
/// </summary>
/// <param name="table">table name</param>
/// <param name="dt">data table</param>
/// <returns>the created command</returns>
/// <remarks>
/// This is subtle enough to deserve some commentary.
/// Instead of doing *lots* and *lots* of hardcoded strings
/// for database definitions we'll use the fact that
/// realistically all insert statements look like "insert
/// into A(b, c) values(:b, :c)" on the parameterized query
/// front. If we just have a list of b, c, etc... we can
/// generate these strings instead of typing them out.
/// </remarks>
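/// <example>
/// A minimal sketch of the generated command (the table and column names are
/// hypothetical, chosen only for illustration):
/// <code>
/// DataTable dt = new DataTable("assets");
/// createCol(dt, "UUID", typeof(String));
/// createCol(dt, "Name", typeof(String));
/// SqliteCommand cmd = createInsertCommand("assets", dt);
/// // cmd.CommandText is now:
/// //   insert into assets(UUID, Name) values (:UUID, :Name)
/// // and cmd.Parameters contains :UUID and :Name bound to their columns.
/// </code>
/// </example>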
public static SqliteCommand createInsertCommand(string table, DataTable dt)
{
string[] cols = new string[dt.Columns.Count];
for (int i = 0; i < dt.Columns.Count; i++)
{
DataColumn col = dt.Columns[i];
cols[i] = col.ColumnName;
}
string sql = "insert into " + table + "(";
sql += String.Join(", ", cols);
// important, the first ':' needs to be here, the rest get added in the join
sql += ") values (:";
sql += String.Join(", :", cols);
sql += ")";
SqliteCommand cmd = new SqliteCommand(sql);
// this provides the binding for all our parameters, so
// much less code than it used to be
foreach (DataColumn col in dt.Columns)
{
cmd.Parameters.Add(createSqliteParameter(col.ColumnName, col.DataType));
}
return cmd;
}
/// <summary>
/// create an update command
/// </summary>
/// <param name="table">table name</param>
/// <param name="pk"></param>
/// <param name="dt"></param>
/// <returns>the created command</returns>
public static SqliteCommand createUpdateCommand(string table, string pk, DataTable dt)
{
string sql = "update " + table + " set ";
string subsql = String.Empty;
foreach (DataColumn col in dt.Columns)
{
if (subsql.Length > 0)
{
// a map function would rock so much here
subsql += ", ";
}
subsql += col.ColumnName + "= :" + col.ColumnName;
}
sql += subsql;
sql += " where " + pk;
SqliteCommand cmd = new SqliteCommand(sql);
// this provides the binding for all our parameters, so
// much less code than it used to be
foreach (DataColumn col in dt.Columns)
{
cmd.Parameters.Add(createSqliteParameter(col.ColumnName, col.DataType));
}
return cmd;
}
/// <summary>
/// Builds a "create table" SQL statement from the supplied DataTable
/// definition, marking the first primary key column (if any) as the primary key.
/// </summary>
/// <param name="dt">Data Table</param>
/// <returns>the created SQL statement</returns>
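/// <example>
/// A minimal sketch (the table and column names are hypothetical):
/// <code>
/// DataTable dt = new DataTable("assets");
/// createCol(dt, "UUID", typeof(String));
/// createCol(dt, "Name", typeof(String));
/// dt.PrimaryKey = new DataColumn[] { dt.Columns["UUID"] };
/// // defineTable(dt) returns:
/// //   create table assets(UUID varchar(255) primary key,
/// //   Name varchar(255))
/// </code>
/// </example>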
public static string defineTable(DataTable dt)
{
string sql = "create table " + dt.TableName + "(";
string subsql = String.Empty;
foreach (DataColumn col in dt.Columns)
{
if (subsql.Length > 0)
{
// a map function would rock so much here
subsql += ",\n";
}
subsql += col.ColumnName + " " + sqliteType(col.DataType);
if (dt.PrimaryKey.Length > 0)
{
if (col == dt.PrimaryKey[0])
{
subsql += " primary key";
}
}
}
sql += subsql;
sql += ")";
return sql;
}
/***********************************************************************
*
* Database Binding functions
*
* These will be db specific due to typing, and minor differences
* in databases.
*
**********************************************************************/
///<summary>
/// <para>
/// This is a convenience function that collapses 5 repetitive
/// lines for defining SqliteParameters to 2 parameters:
/// column name and database type.
/// </para>
///
/// <para>
/// It assumes certain conventions like :param as the param
/// name to replace in parametrized queries, and that source
/// version is always current version, both of which are fine
/// for us.
/// </para>
///</summary>
/// <param name="name"></param>
/// <param name="type"></param>
///<returns>a built sqlite parameter</returns>
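/// <example>
/// For instance, a sketch of the resulting binding:
/// <code>
/// SqliteParameter p = createSqliteParameter("UUID", typeof(String));
/// // p.ParameterName == ":UUID", p.DbType == DbType.String,
/// // p.SourceColumn == "UUID", p.SourceVersion == DataRowVersion.Current
/// </code>
/// </example>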
public static SqliteParameter createSqliteParameter(string name, Type type)
{
SqliteParameter param = new SqliteParameter();
param.ParameterName = ":" + name;
param.DbType = dbtypeFromType(type);
param.SourceColumn = name;
param.SourceVersion = DataRowVersion.Current;
return param;
}
/***********************************************************************
*
* Type conversion functions
*
**********************************************************************/
/// <summary>
/// Type conversion function
/// </summary>
/// <param name="type">a type</param>
/// <returns>a DbType</returns>
public static DbType dbtypeFromType(Type type)
{
if (type == typeof (String))
{
return DbType.String;
}
else if (type == typeof (Int32))
{
return DbType.Int32;
}
else if (type == typeof (UInt32))
{
return DbType.UInt32;
}
else if (type == typeof (Int64))
{
return DbType.Int64;
}
else if (type == typeof (UInt64))
{
return DbType.UInt64;
}
else if (type == typeof (Double))
{
return DbType.Double;
}
else if (type == typeof (Boolean))
{
return DbType.Boolean;
}
else if (type == typeof (Byte[]))
{
return DbType.Binary;
}
else
{
return DbType.String;
}
}
/// <summary>
/// Maps a .NET type to the SQLite column type used when defining tables.
/// </summary>
/// <param name="type">a Type</param>
/// <returns>a string</returns>
/// <remarks>this is something we'll need to implement for each db slightly differently.</remarks>
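/// <example>
/// A few illustrative mappings produced by this helper:
/// <code>
/// sqliteType(typeof(Int32));   // "integer"
/// sqliteType(typeof(Double));  // "float"
/// sqliteType(typeof(UInt64));  // "varchar(255)" (stored as text)
/// sqliteType(typeof(Byte[]));  // "blob"
/// </code>
/// </example>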
public static string sqliteType(Type type)
{
if (type == typeof (String))
{
return "varchar(255)";
}
else if (type == typeof (Int32))
{
return "integer";
}
else if (type == typeof (UInt32))
{
return "integer";
}
else if (type == typeof (Int64))
{
return "varchar(255)";
}
else if (type == typeof (UInt64))
{
return "varchar(255)";
}
else if (type == typeof (Double))
{
return "float";
}
else if (type == typeof (Boolean))
{
return "integer";
}
else if (type == typeof (Byte[]))
{
return "blob";
}
else
{
return "string";
}
}
}
}
| |
#pragma warning disable 649
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using UnityEditor;
using UnityEngine;
namespace GitHub.Unity
{
[Serializable]
class ChangesetTreeView : Subview
{
private const string BasePathLabel = "{0}";
private const string NoChangesLabel = "No changes found";
[SerializeField] private List<GitStatusEntry> entries = new List<GitStatusEntry>();
[SerializeField] private List<GitCommitTarget> entryCommitTargets = new List<GitCommitTarget>();
[SerializeField] private List<string> foldedTreeEntries = new List<string>();
[NonSerialized] private FileTreeNode tree;
public override void OnGUI()
{
GUILayout.BeginVertical();
{
// The file tree (when available)
if (tree != null && entries.Any())
{
// Base path label
if (!string.IsNullOrEmpty(tree.Path))
{
GUILayout.BeginHorizontal();
{
var iconRect = GUILayoutUtility.GetRect(Styles.CommitIconSize, Styles.CommitIconSize, GUILayout.ExpandWidth(false));
iconRect.y += 2;
iconRect.x += 2;
GUI.DrawTexture(iconRect, Styles.FolderIcon, ScaleMode.ScaleToFit);
GUILayout.Label(string.Format(BasePathLabel, tree.Path));
}
GUILayout.EndHorizontal();
}
GUILayout.BeginHorizontal();
{
GUILayout.Space(Styles.TreeIndentation + Styles.TreeRootIndentation);
GUILayout.BeginVertical();
{
// Root nodes
foreach (var node in tree.Children)
{
TreeNode(node);
}
}
GUILayout.EndVertical();
}
GUILayout.EndHorizontal();
// If we have no minimum height calculated, do that now and repaint so it can be used
if (Height == 0f && Event.current.type == EventType.Repaint)
{
Height = GUILayoutUtility.GetLastRect().yMax + Styles.MinCommitTreePadding;
Redraw();
}
GUILayout.FlexibleSpace();
}
else
{
GUILayout.FlexibleSpace();
GUILayout.BeginHorizontal();
{
GUILayout.FlexibleSpace();
GUILayout.Label(NoChangesLabel);
GUILayout.FlexibleSpace();
}
GUILayout.EndHorizontal();
GUILayout.FlexibleSpace();
}
}
GUILayout.EndVertical();
}
public void UpdateEntries(IList<GitStatusEntry> newEntries)
{
// Handle the empty list scenario
if (!newEntries.Any())
{
entries.Clear();
entryCommitTargets.Clear();
tree = null;
foldedTreeEntries.Clear();
OnCommitTreeChange();
return;
}
tree = TreeBuilder.BuildTreeRoot(newEntries, entries, entryCommitTargets, foldedTreeEntries, AssetDatabase.GetCachedIcon);
OnCommitTreeChange();
}
private void OnCommitTreeChange()
{
Height = 0f;
Redraw();
}
private void TreeNode(FileTreeNode node)
{
GUILayout.Space(Styles.TreeVerticalSpacing);
var target = node.Target;
var isFolder = node.Children.Any();
var isFolderForMeta = false;
if (node.Children.Count() == 1)
{
var parentLabel = node.Label;
var childLabel = node.Children.First().Label;
isFolderForMeta = childLabel.StartsWith(parentLabel) && childLabel.EndsWith(".meta");
}
GUILayout.BeginHorizontal();
{
if (!Readonly)
{
// Commit inclusion toggle
var state = node.State;
var toggled = state == CommitState.All;
EditorGUI.BeginChangeCheck();
{
toggled = GUILayout.Toggle(toggled, "", state == CommitState.Some ? Styles.ToggleMixedStyle : GUI.skin.toggle,
GUILayout.ExpandWidth(false));
}
if (EditorGUI.EndChangeCheck())
{
node.State = toggled ? CommitState.All : CommitState.None;
}
}
// Foldout
if (isFolder)
{
Rect foldoutRect;
if (Readonly)
{
foldoutRect = GUILayoutUtility.GetRect(1, 1);
foldoutRect.Set(foldoutRect.x - 7f, foldoutRect.y + 3f, 0f, EditorGUIUtility.singleLineHeight);
}
else
{
foldoutRect = GUILayoutUtility.GetLastRect();
}
foldoutRect.Set(foldoutRect.x - Styles.FoldoutWidth + Styles.FoldoutIndentation, foldoutRect.y, Styles.FoldoutWidth,
foldoutRect.height);
EditorGUI.BeginChangeCheck();
{
node.Open = GUI.Toggle(foldoutRect, node.Open, "", EditorStyles.foldout);
}
if (EditorGUI.EndChangeCheck())
{
if (!node.Open && !foldedTreeEntries.Contains(node.RepositoryPath))
{
foldedTreeEntries.Add(node.RepositoryPath);
}
else if (node.Open)
{
foldedTreeEntries.Remove(node.RepositoryPath);
}
OnCommitTreeChange();
}
}
GitFileStatus? status = null;
// Node icon and label
GUILayout.BeginHorizontal();
{
GUILayout.Space(Styles.CommitIconHorizontalPadding);
var iconRect = GUILayoutUtility.GetRect(Styles.CommitIconSize, Styles.CommitIconSize, GUILayout.ExpandWidth(false));
iconRect.y += 2;
iconRect.x -= 2;
if (Event.current.type == EventType.Repaint)
{
var icon = (Texture) node.Icon;
if (icon == null)
{
if (isFolderForMeta || !isFolder)
{
icon = Styles.DefaultAssetIcon;
}
else
{
icon = Styles.FolderIcon;
}
}
if (icon != null)
{
GUI.DrawTexture(iconRect,
icon,
ScaleMode.ScaleToFit);
}
}
var statusRect = new Rect(
iconRect.xMax - 9,
iconRect.yMax - 7,
9,
9);
// Current status (if any)
if (target != null)
{
var idx = entryCommitTargets.IndexOf(target);
if (idx > -1)
{
status = entries[idx].Status;
var statusIcon = Styles.GetFileStatusIcon(entries[idx].Status, false);
if (statusIcon != null)
GUI.DrawTexture(statusRect, statusIcon);
}
}
GUILayout.Space(Styles.CommitIconHorizontalPadding);
}
GUILayout.EndHorizontal();
// Make the text gray and strikethrough if the file is deleted
if (status == GitFileStatus.Deleted)
{
GUILayout.Label(new GUIContent(node.Label, node.RepositoryPath), Styles.DeletedFileLabel, GUILayout.ExpandWidth(true));
var labelRect = GUILayoutUtility.GetLastRect();
var strikeRect = new Rect(labelRect.xMin, labelRect.center.y, labelRect.width, 1);
EditorGUI.DrawRect(strikeRect, Color.gray);
}
else
{
GUILayout.Label(new GUIContent(node.Label, node.RepositoryPath), GUILayout.ExpandWidth(true));
}
GUILayout.FlexibleSpace();
}
GUILayout.EndHorizontal();
GUILayout.BeginHorizontal();
{
// Render children (if any and folded out)
if (isFolder && node.Open)
{
GUILayout.Space(Styles.TreeIndentation);
GUILayout.BeginVertical();
{
foreach (var child in node.Children)
{
TreeNode(child);
}
}
GUILayout.EndVertical();
}
}
GUILayout.EndHorizontal();
}
public override bool IsBusy
{
get { return false; }
}
public float Height { get; protected set; }
public bool Readonly { get; set; }
public IList<GitStatusEntry> Entries
{
get { return entries; }
}
public IList<GitCommitTarget> CommitTargets
{
get { return entryCommitTargets; }
}
}
}
| |
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using NUnit.Framework;
using Umbraco.Cms.Core;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Services;
using Umbraco.Cms.Tests.Common.Builders;
using Umbraco.Cms.Tests.Common.Builders.Extensions;
using Umbraco.Cms.Tests.Integration.TestServerTest;
using Umbraco.Cms.Web.BackOffice.Controllers;
using Umbraco.Cms.Web.Common.Formatters;
using Umbraco.Extensions;
namespace Umbraco.Cms.Tests.Integration.Umbraco.Web.BackOffice.Controllers
{
[TestFixture]
public class EntityControllerTests : UmbracoTestServerTestBase
{
[Test]
public async Task GetUrlsByIds_MediaWithIntegerIds_ReturnsValidMap()
{
IMediaTypeService mediaTypeService = Services.GetRequiredService<IMediaTypeService>();
IMediaService mediaService = Services.GetRequiredService<IMediaService>();
var mediaItems = new List<Media>();
using (ScopeProvider.CreateScope(autoComplete: true))
{
IMediaType mediaType = mediaTypeService.Get("image");
mediaTypeService.Save(mediaType);
mediaItems.Add(MediaBuilder.CreateMediaImage(mediaType, -1));
mediaItems.Add(MediaBuilder.CreateMediaImage(mediaType, -1));
foreach (Media media in mediaItems)
{
mediaService.Save(media);
}
}
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Media
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetUrlsByIds", typeof(EntityController), queryParameters);
var payload = new
{
ids = new[]
{
mediaItems[0].Id,
mediaItems[1].Id,
}
};
HttpResponseMessage response = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, payload);
// skip pointless un-parseable cruft.
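// The back-office JSON formatter prepends an anti-forgery/JSON-hijacking prefix
// (AngularJsonMediaTypeFormatter.XsrfPrefix), so advance the stream past it before deserializing.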
(await response.Content.ReadAsStreamAsync()).Seek(AngularJsonMediaTypeFormatter.XsrfPrefix.Length, SeekOrigin.Begin);
IDictionary<int, string> results = await response.Content.ReadFromJsonAsync<IDictionary<int, string>>();
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
Assert.IsTrue(results![payload.ids[0]].StartsWith("/media"));
Assert.IsTrue(results![payload.ids[1]].StartsWith("/media"));
});
}
[Test]
public async Task GetUrlsByIds_Media_ReturnsEmptyStringsInMapForUnknownItems()
{
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Media
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetUrlsByIds", typeof(EntityController), queryParameters);
var payload = new
{
ids = new[] { 1, 2 }
};
HttpResponseMessage response = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, payload);
// skip pointless un-parseable cruft.
(await response.Content.ReadAsStreamAsync()).Seek(AngularJsonMediaTypeFormatter.XsrfPrefix.Length, SeekOrigin.Begin);
IDictionary<int, string> results = await response.Content.ReadFromJsonAsync<IDictionary<int, string>>();
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
Assert.That(results!.Keys.Count, Is.EqualTo(2));
Assert.AreEqual(results![payload.ids[0]], string.Empty);
});
}
[Test]
public async Task GetUrlsByIds_MediaWithGuidIds_ReturnsValidMap()
{
IMediaTypeService mediaTypeService = Services.GetRequiredService<IMediaTypeService>();
IMediaService mediaService = Services.GetRequiredService<IMediaService>();
var mediaItems = new List<Media>();
using (ScopeProvider.CreateScope(autoComplete: true))
{
IMediaType mediaType = mediaTypeService.Get("image");
mediaTypeService.Save(mediaType);
mediaItems.Add(MediaBuilder.CreateMediaImage(mediaType, -1));
mediaItems.Add(MediaBuilder.CreateMediaImage(mediaType, -1));
foreach (Media media in mediaItems)
{
mediaService.Save(media);
}
}
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Media
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetUrlsByIds", typeof(EntityController), queryParameters);
var payload = new
{
ids = new[]
{
mediaItems[0].Key.ToString(),
mediaItems[1].Key.ToString(),
}
};
HttpResponseMessage response = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, payload);
// skip pointless un-parseable cruft.
(await response.Content.ReadAsStreamAsync()).Seek(AngularJsonMediaTypeFormatter.XsrfPrefix.Length, SeekOrigin.Begin);
IDictionary<string, string> results = await response.Content.ReadFromJsonAsync<IDictionary<string, string>>();
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
Assert.IsTrue(results![payload.ids[0]].StartsWith("/media"));
Assert.IsTrue(results![payload.ids[1]].StartsWith("/media"));
});
}
[Test]
public async Task GetUrlsByIds_MediaWithUdiIds_ReturnsValidMap()
{
IMediaTypeService mediaTypeService = Services.GetRequiredService<IMediaTypeService>();
IMediaService mediaService = Services.GetRequiredService<IMediaService>();
var mediaItems = new List<Media>();
using (ScopeProvider.CreateScope(autoComplete: true))
{
IMediaType mediaType = mediaTypeService.Get("image");
mediaTypeService.Save(mediaType);
mediaItems.Add(MediaBuilder.CreateMediaImage(mediaType, -1));
mediaItems.Add(MediaBuilder.CreateMediaImage(mediaType, -1));
foreach (Media media in mediaItems)
{
mediaService.Save(media);
}
}
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Media
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetUrlsByIds", typeof(EntityController), queryParameters);
var payload = new
{
ids = new[]
{
mediaItems[0].GetUdi().ToString(),
mediaItems[1].GetUdi().ToString(),
}
};
HttpResponseMessage response = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, payload);
// skip pointless un-parseable cruft.
(await response.Content.ReadAsStreamAsync()).Seek(AngularJsonMediaTypeFormatter.XsrfPrefix.Length, SeekOrigin.Begin);
IDictionary<string, string> results = await response.Content.ReadFromJsonAsync<IDictionary<string, string>>();
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
Assert.IsTrue(results![payload.ids[0]].StartsWith("/media"));
Assert.IsTrue(results![payload.ids[1]].StartsWith("/media"));
});
}
[Test]
public async Task GetUrlsByIds_Documents_ReturnsHashesInMapForUnknownItems()
{
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Document
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetUrlsByIds", typeof(EntityController), queryParameters);
var payload = new
{
ids = new[] { 1, 2 }
};
HttpResponseMessage response = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, payload);
// skip pointless un-parseable cruft.
(await response.Content.ReadAsStreamAsync()).Seek(AngularJsonMediaTypeFormatter.XsrfPrefix.Length, SeekOrigin.Begin);
IDictionary<int, string> results = await response.Content.ReadFromJsonAsync<IDictionary<int, string>>();
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
Assert.That(results!.Keys.Count, Is.EqualTo(2));
Assert.AreEqual(results![payload.ids[0]], "#");
});
}
[Test]
public async Task GetUrlsByIds_DocumentWithIntIds_ReturnsValidMap()
{
IContentTypeService contentTypeService = Services.GetRequiredService<IContentTypeService>();
IContentService contentService = Services.GetRequiredService<IContentService>();
var contentItems = new List<IContent>();
using (ScopeProvider.CreateScope(autoComplete: true))
{
IContentType contentType = ContentTypeBuilder.CreateBasicContentType();
contentTypeService.Save(contentType);
ContentBuilder builder = new ContentBuilder()
.WithContentType(contentType);
Content root = builder.WithName("foo").Build();
contentService.SaveAndPublish(root);
contentItems.Add(builder.WithParent(root).WithName("bar").Build());
contentItems.Add(builder.WithParent(root).WithName("baz").Build());
foreach (IContent content in contentItems)
{
contentService.SaveAndPublish(content);
}
}
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Document
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetUrlsByIds", typeof(EntityController), queryParameters);
var payload = new
{
ids = new[]
{
contentItems[0].Id,
contentItems[1].Id,
}
};
HttpResponseMessage response = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, payload);
// skip pointless un-parseable cruft.
(await response.Content.ReadAsStreamAsync()).Seek(AngularJsonMediaTypeFormatter.XsrfPrefix.Length, SeekOrigin.Begin);
IDictionary<int, string> results = await response.Content.ReadFromJsonAsync<IDictionary<int, string>>();
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
Assert.IsTrue(results![payload.ids[0]].StartsWith("/bar"));
Assert.IsTrue(results![payload.ids[1]].StartsWith("/baz"));
});
}
[Test]
public async Task GetUrlsByIds_DocumentWithGuidIds_ReturnsValidMap()
{
IContentTypeService contentTypeService = Services.GetRequiredService<IContentTypeService>();
IContentService contentService = Services.GetRequiredService<IContentService>();
var contentItems = new List<IContent>();
using (ScopeProvider.CreateScope(autoComplete: true))
{
IContentType contentType = ContentTypeBuilder.CreateBasicContentType();
contentTypeService.Save(contentType);
ContentBuilder builder = new ContentBuilder()
.WithContentType(contentType);
Content root = builder.WithName("foo").Build();
contentService.SaveAndPublish(root);
contentItems.Add(builder.WithParent(root).WithName("bar").Build());
contentItems.Add(builder.WithParent(root).WithName("baz").Build());
foreach (IContent content in contentItems)
{
contentService.SaveAndPublish(content);
}
}
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Document
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetUrlsByIds", typeof(EntityController), queryParameters);
var payload = new
{
ids = new[]
{
contentItems[0].Key.ToString(),
contentItems[1].Key.ToString(),
}
};
HttpResponseMessage response = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, payload);
// Skip the anti-XSRF prefix so the remaining JSON can be parsed.
(await response.Content.ReadAsStreamAsync()).Seek(AngularJsonMediaTypeFormatter.XsrfPrefix.Length, SeekOrigin.Begin);
IDictionary<string, string> results = await response.Content.ReadFromJsonAsync<IDictionary<string, string>>();
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
Assert.IsTrue(results![payload.ids[0]].StartsWith("/bar"));
Assert.IsTrue(results![payload.ids[1]].StartsWith("/baz"));
});
}
[Test]
public async Task GetUrlsByIds_DocumentWithUdiIds_ReturnsValidMap()
{
IContentTypeService contentTypeService = Services.GetRequiredService<IContentTypeService>();
IContentService contentService = Services.GetRequiredService<IContentService>();
var contentItems = new List<IContent>();
using (ScopeProvider.CreateScope(autoComplete: true))
{
IContentType contentType = ContentTypeBuilder.CreateBasicContentType();
contentTypeService.Save(contentType);
ContentBuilder builder = new ContentBuilder()
.WithContentType(contentType);
Content root = builder.WithName("foo").Build();
contentService.SaveAndPublish(root);
contentItems.Add(builder.WithParent(root).WithName("bar").Build());
contentItems.Add(builder.WithParent(root).WithName("baz").Build());
foreach (IContent content in contentItems)
{
contentService.SaveAndPublish(content);
}
}
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Document
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetUrlsByIds", typeof(EntityController), queryParameters);
var payload = new
{
ids = new[]
{
contentItems[0].GetUdi().ToString(),
contentItems[1].GetUdi().ToString(),
}
};
HttpResponseMessage response = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, payload);
// Skip the anti-XSRF prefix so the remaining JSON can be parsed.
(await response.Content.ReadAsStreamAsync()).Seek(AngularJsonMediaTypeFormatter.XsrfPrefix.Length, SeekOrigin.Begin);
IDictionary<string, string> results = await response.Content.ReadFromJsonAsync<IDictionary<string, string>>();
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);
Assert.IsTrue(results![payload.ids[0]].StartsWith("/bar"));
Assert.IsTrue(results![payload.ids[1]].StartsWith("/baz"));
});
}
[Test]
public async Task GetByIds_MultipleCalls_WorksAsExpected()
{
IContentTypeService contentTypeService = Services.GetRequiredService<IContentTypeService>();
IContentService contentService = Services.GetRequiredService<IContentService>();
var contentItems = new List<IContent>();
using (ScopeProvider.CreateScope(autoComplete: true))
{
IContentType contentType = ContentTypeBuilder.CreateBasicContentType();
contentTypeService.Save(contentType);
ContentBuilder builder = new ContentBuilder()
.WithContentType(contentType);
Content root = builder.WithName("foo").Build();
contentService.SaveAndPublish(root);
contentItems.Add(builder.WithParent(root).WithName("bar").Build());
contentItems.Add(builder.WithParent(root).WithName("baz").Build());
foreach (IContent content in contentItems)
{
contentService.SaveAndPublish(content);
}
}
var queryParameters = new Dictionary<string, object>
{
["type"] = Constants.UdiEntityType.Document
};
var url = LinkGenerator.GetUmbracoControllerUrl("GetByIds", typeof(EntityController), queryParameters);
var udiPayload = new
{
ids = new[]
{
contentItems[0].GetUdi().ToString(),
contentItems[1].GetUdi().ToString(),
}
};
var intPayload = new
{
ids = new[]
{
contentItems[0].Id,
contentItems[1].Id,
}
};
HttpResponseMessage udiResponse = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, udiPayload);
HttpResponseMessage intResponse = await HttpClientJsonExtensions.PostAsJsonAsync(Client, url, intPayload);
Assert.Multiple(() =>
{
Assert.AreEqual(HttpStatusCode.OK, udiResponse.StatusCode, "First request error");
Assert.AreEqual(HttpStatusCode.OK, intResponse.StatusCode, "Second request error");
});
}
}
}
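// --- Editor's sketch (not part of the original test fixture) ---
// The tests above repeatedly seek past AngularJsonMediaTypeFormatter.XsrfPrefix before
// deserializing the response body. A minimal helper expressing that pattern is sketched
// below; the type and method names are hypothetical and exist only for illustration.
internal static class XsrfPrefixedJsonReader
{
    // Skip a fixed-length anti-XSRF prefix, then deserialize the remainder of the stream as JSON.
    internal static async System.Threading.Tasks.Task<T?> ReadAsync<T>(
        System.Net.Http.HttpContent content, int prefixLength) where T : class
    {
        System.IO.Stream stream = await content.ReadAsStreamAsync();
        stream.Seek(prefixLength, System.IO.SeekOrigin.Begin);
        return await System.Text.Json.JsonSerializer.DeserializeAsync<T>(stream);
    }
}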
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Text;
using System.Globalization;
using log4net.Core;
using log4net.Layout;
using log4net.Util;
namespace log4net.Appender
{
/// <summary>
/// Appends logging events to the terminal using ANSI color escape sequences.
/// </summary>
/// <remarks>
/// <para>
/// AnsiColorTerminalAppender appends log events to the standard output stream
/// or the error output stream using a layout specified by the
/// user. It also allows the color of a specific level of message to be set.
/// </para>
/// <note>
/// This appender expects the terminal to understand the VT100 control set
/// in order to interpret the color codes. If the terminal or console does not
/// understand the control codes the behavior is not defined.
/// </note>
/// <para>
/// By default, all output is written to the console's standard output stream.
/// The <see cref="Target"/> property can be set to direct the output to the
/// error stream.
/// </para>
/// <para>
/// NOTE: This appender writes each message to the <c>System.Console.Out</c> or
/// <c>System.Console.Error</c> that is set at the time the event is appended.
/// Therefore it is possible to programmatically redirect the output of this appender
/// (for example NUnit does this to capture program output). While this is the desired
/// behavior of this appender it may have security implications in your application.
/// </para>
/// <para>
/// When configuring the ANSI colored terminal appender, a mapping should be
/// specified to map a logging level to a color. For example:
/// </para>
/// <code lang="XML" escaped="true">
/// <mapping>
/// <level value="ERROR" />
/// <foreColor value="White" />
/// <backColor value="Red" />
/// <attributes value="Bright,Underscore" />
/// </mapping>
/// <mapping>
/// <level value="DEBUG" />
/// <backColor value="Green" />
/// </mapping>
/// </code>
/// <para>
/// The Level is the standard log4net logging level and ForeColor and BackColor can be any
/// of the following values:
/// <list type="bullet">
/// <item><term>Blue</term><description></description></item>
/// <item><term>Green</term><description></description></item>
/// <item><term>Red</term><description></description></item>
/// <item><term>White</term><description></description></item>
/// <item><term>Yellow</term><description></description></item>
/// <item><term>Purple</term><description></description></item>
/// <item><term>Cyan</term><description></description></item>
/// </list>
/// These color values cannot be combined together to make new colors.
/// </para>
/// <para>
/// The attributes can be any combination of the following:
/// <list type="bullet">
/// <item><term>Bright</term><description>foreground is brighter</description></item>
/// <item><term>Dim</term><description>foreground is dimmer</description></item>
/// <item><term>Underscore</term><description>message is underlined</description></item>
/// <item><term>Blink</term><description>foreground is blinking (does not work on all terminals)</description></item>
/// <item><term>Reverse</term><description>foreground and background are reversed</description></item>
/// <item><term>Hidden</term><description>output is hidden</description></item>
/// <item><term>Strikethrough</term><description>message has a line through it</description></item>
/// </list>
/// While any of these attributes may be combined, not all combinations work well
/// together; for example, setting both <i>Bright</i> and <i>Dim</i> attributes makes
/// no sense.
/// </para>
/// </remarks>
/// <author>Patrick Wagstrom</author>
/// <author>Nicko Cadell</author>
public class AnsiColorTerminalAppender : AppenderSkeleton
{
#region Colors Enum
/// <summary>
/// The enum of possible display attributes
/// </summary>
/// <remarks>
/// <para>
/// The following flags can be combined together to
/// form the ANSI color attributes.
/// </para>
/// </remarks>
/// <seealso cref="AnsiColorTerminalAppender" />
[Flags]
public enum AnsiAttributes : int
{
/// <summary>
/// text is bright
/// </summary>
Bright = 1,
/// <summary>
/// text is dim
/// </summary>
Dim = 2,
/// <summary>
/// text is underlined
/// </summary>
Underscore = 4,
/// <summary>
/// text is blinking
/// </summary>
/// <remarks>
/// Not all terminals support this attribute
/// </remarks>
Blink = 8,
/// <summary>
/// text and background colors are reversed
/// </summary>
Reverse = 16,
/// <summary>
/// text is hidden
/// </summary>
Hidden = 32,
/// <summary>
/// text is displayed with a strikethrough
/// </summary>
Strikethrough = 64,
/// <summary>
/// text color is light
/// </summary>
Light = 128
}
/// <summary>
/// The enum of possible foreground or background color values for
/// use with the color mapping method
/// </summary>
/// <remarks>
/// <para>
/// The output can be in one of the following ANSI colors.
/// </para>
/// </remarks>
/// <seealso cref="AnsiColorTerminalAppender" />
public enum AnsiColor : int
{
/// <summary>
/// color is black
/// </summary>
Black = 0,
/// <summary>
/// color is red
/// </summary>
Red = 1,
/// <summary>
/// color is green
/// </summary>
Green = 2,
/// <summary>
/// color is yellow
/// </summary>
Yellow = 3,
/// <summary>
/// color is blue
/// </summary>
Blue = 4,
/// <summary>
/// color is magenta
/// </summary>
Magenta = 5,
/// <summary>
/// color is cyan
/// </summary>
Cyan = 6,
/// <summary>
/// color is white
/// </summary>
White = 7
}
#endregion
#region Public Instance Constructors
/// <summary>
/// Initializes a new instance of the <see cref="AnsiColorTerminalAppender" /> class.
/// </summary>
/// <remarks>
/// The instance of the <see cref="AnsiColorTerminalAppender" /> class is set up to write
/// to the standard output stream.
/// </remarks>
public AnsiColorTerminalAppender()
{
}
#endregion Public Instance Constructors
#region Public Instance Properties
/// <summary>
/// Target is the value of the console output stream.
/// </summary>
/// <value>
/// Target is the value of the console output stream.
/// This is either <c>"Console.Out"</c> or <c>"Console.Error"</c>.
/// </value>
/// <remarks>
/// <para>
/// Target is the value of the console output stream.
/// This is either <c>"Console.Out"</c> or <c>"Console.Error"</c>.
/// </para>
/// </remarks>
virtual public string Target
{
get { return m_writeToErrorStream ? ConsoleError : ConsoleOut; }
set
{
string trimmedTargetName = value.Trim();
if (string.Compare(ConsoleError, trimmedTargetName, true, CultureInfo.InvariantCulture) == 0)
{
m_writeToErrorStream = true;
}
else
{
m_writeToErrorStream = false;
}
}
}
/// <summary>
/// Add a mapping of level to color
/// </summary>
/// <param name="mapping">The mapping to add</param>
/// <remarks>
/// <para>
/// Add a <see cref="LevelColors"/> mapping to this appender.
/// Each mapping defines the foreground and background colours
/// for a level.
/// </para>
/// </remarks>
public void AddMapping(LevelColors mapping)
{
m_levelMapping.Add(mapping);
}
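// --- Editor's sketch (not part of log4net) ---
// A programmatic equivalent of the XML mapping shown in the class remarks; a minimal
// sketch assuming the appender's layout and logging repository are configured elsewhere:
//
//   var appender = new AnsiColorTerminalAppender();
//   appender.AddMapping(new LevelColors
//   {
//       Level = Level.Error,
//       ForeColor = AnsiColor.White,
//       BackColor = AnsiColor.Red,
//       Attributes = AnsiAttributes.Bright | AnsiAttributes.Underscore
//   });
//   appender.ActivateOptions();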
#endregion Public Instance Properties
#region Override implementation of AppenderSkeleton
/// <summary>
/// This method is called by the <see cref="M:AppenderSkeleton.DoAppend(LoggingEvent)"/> method.
/// </summary>
/// <param name="loggingEvent">The event to log.</param>
/// <remarks>
/// <para>
/// Writes the event to the console.
/// </para>
/// <para>
/// The format of the output will depend on the appender's layout.
/// </para>
/// </remarks>
override protected void Append(log4net.Core.LoggingEvent loggingEvent)
{
string loggingMessage = RenderLoggingEvent(loggingEvent);
// see if there is a specified lookup.
LevelColors levelColors = m_levelMapping.Lookup(loggingEvent.Level) as LevelColors;
if (levelColors != null)
{
// Prepend the Ansi Color code
loggingMessage = levelColors.CombinedColor + loggingMessage;
}
// on most terminals there are weird effects if we don't clear the background color
// before the new line. This checks to see if it ends with a newline, and if
// so, inserts the clear codes before the newline, otherwise the clear codes
// are inserted afterwards.
if (loggingMessage.Length > 1)
{
if (loggingMessage.EndsWith("\r\n") || loggingMessage.EndsWith("\n\r"))
{
loggingMessage = loggingMessage.Insert(loggingMessage.Length - 2, PostEventCodes);
}
else if (loggingMessage.EndsWith("\n") || loggingMessage.EndsWith("\r"))
{
loggingMessage = loggingMessage.Insert(loggingMessage.Length - 1, PostEventCodes);
}
else
{
loggingMessage = loggingMessage + PostEventCodes;
}
}
else
{
if (loggingMessage.Length > 0 && (loggingMessage[0] == '\n' || loggingMessage[0] == '\r'))
{
loggingMessage = PostEventCodes + loggingMessage;
}
else
{
loggingMessage = loggingMessage + PostEventCodes;
}
}
#if NETCF_1_0
// Write to the output stream
Console.Write(loggingMessage);
#else
if (m_writeToErrorStream)
{
// Write to the error stream
Console.Error.Write(loggingMessage);
}
else
{
// Write to the output stream
Console.Write(loggingMessage);
}
#endif
}
/// <summary>
/// This appender requires a <see cref="Layout"/> to be set.
/// </summary>
/// <value><c>true</c></value>
/// <remarks>
/// <para>
/// This appender requires a <see cref="Layout"/> to be set.
/// </para>
/// </remarks>
override protected bool RequiresLayout
{
get { return true; }
}
/// <summary>
/// Initialize the options for this appender
/// </summary>
/// <remarks>
/// <para>
/// Initialize the level to color mappings set on this appender.
/// </para>
/// </remarks>
public override void ActivateOptions()
{
base.ActivateOptions();
m_levelMapping.ActivateOptions();
}
#endregion Override implementation of AppenderSkeleton
#region Public Static Fields
/// <summary>
/// The <see cref="AnsiColorTerminalAppender.Target"/> to use when writing to the Console
/// standard output stream.
/// </summary>
/// <remarks>
/// <para>
/// The <see cref="AnsiColorTerminalAppender.Target"/> to use when writing to the Console
/// standard output stream.
/// </para>
/// </remarks>
public const string ConsoleOut = "Console.Out";
/// <summary>
/// The <see cref="AnsiColorTerminalAppender.Target"/> to use when writing to the Console
/// standard error output stream.
/// </summary>
/// <remarks>
/// <para>
/// The <see cref="AnsiColorTerminalAppender.Target"/> to use when writing to the Console
/// standard error output stream.
/// </para>
/// </remarks>
public const string ConsoleError = "Console.Error";
#endregion Public Static Fields
#region Private Instances Fields
/// <summary>
/// Flag to write output to the error stream rather than the standard output stream
/// </summary>
private bool m_writeToErrorStream = false;
/// <summary>
/// Mapping from level object to color value
/// </summary>
private LevelMapping m_levelMapping = new LevelMapping();
/// <summary>
/// Ansi code to reset terminal
/// </summary>
private const string PostEventCodes = "\x1b[0m";
#endregion Private Instances Fields
#region LevelColors LevelMapping Entry
/// <summary>
/// A class to act as a mapping between the level that a logging call is made at and
/// the color it should be displayed as.
/// </summary>
/// <remarks>
/// <para>
/// Defines the mapping between a level and the color it should be displayed in.
/// </para>
/// </remarks>
public class LevelColors : LevelMappingEntry
{
private AnsiColor m_foreColor;
private AnsiColor m_backColor;
private AnsiAttributes m_attributes;
private string m_combinedColor = "";
/// <summary>
/// The mapped foreground color for the specified level
/// </summary>
/// <remarks>
/// <para>
/// Required property.
/// The mapped foreground color for the specified level
/// </para>
/// </remarks>
public AnsiColor ForeColor
{
get { return m_foreColor; }
set { m_foreColor = value; }
}
/// <summary>
/// The mapped background color for the specified level
/// </summary>
/// <remarks>
/// <para>
/// Required property.
/// The mapped background color for the specified level
/// </para>
/// </remarks>
public AnsiColor BackColor
{
get { return m_backColor; }
set { m_backColor = value; }
}
/// <summary>
/// The color attributes for the specified level
/// </summary>
/// <remarks>
/// <para>
/// Required property.
/// The color attributes for the specified level
/// </para>
/// </remarks>
public AnsiAttributes Attributes
{
get { return m_attributes; }
set { m_attributes = value; }
}
/// <summary>
/// Initialize the options for the object
/// </summary>
/// <remarks>
/// <para>
/// Combine the <see cref="ForeColor"/> and <see cref="BackColor"/> together
/// and append the attributes.
/// </para>
/// </remarks>
public override void ActivateOptions()
{
base.ActivateOptions();
StringBuilder buf = new StringBuilder();
// Reset any existing codes
buf.Append("\x1b[0;");
int lightAdjustment = ((m_attributes & AnsiAttributes.Light) > 0) ? 60 : 0;
// set the foreground color
buf.Append(30 + lightAdjustment + (int)m_foreColor);
buf.Append(';');
// set the background color
buf.Append(40 + lightAdjustment + (int)m_backColor);
// set the attributes
if ((m_attributes & AnsiAttributes.Bright) > 0)
{
buf.Append(";1");
}
if ((m_attributes & AnsiAttributes.Dim) > 0)
{
buf.Append(";2");
}
if ((m_attributes & AnsiAttributes.Underscore) > 0)
{
buf.Append(";4");
}
if ((m_attributes & AnsiAttributes.Blink) > 0)
{
buf.Append(";5");
}
if ((m_attributes & AnsiAttributes.Reverse) > 0)
{
buf.Append(";7");
}
if ((m_attributes & AnsiAttributes.Hidden) > 0)
{
buf.Append(";8");
}
if ((m_attributes & AnsiAttributes.Strikethrough) > 0)
{
buf.Append(";9");
}
buf.Append('m');
m_combinedColor = buf.ToString();
}
/// <summary>
/// The combined <see cref="ForeColor"/>, <see cref="BackColor"/> and
/// <see cref="Attributes"/> suitable for setting the ansi terminal color.
/// </summary>
internal string CombinedColor
{
get { return m_combinedColor; }
}
}
#endregion // LevelColors LevelMapping Entry
}
}
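// --- Editor's sketch (not part of log4net) ---
// A worked example of the escape sequence that LevelColors.ActivateOptions assembles:
// for ForeColor = White (7), BackColor = Red (1) and Attributes = Bright the combined
// code is "\x1b[0;37;41;1m" (reset, foreground 30 + 7, background 40 + 1, bold).
// The demo type below is hypothetical and only mirrors the arithmetic used above.
internal static class AnsiCodeDemo
{
    internal static string Build(int foreColor, int backColor, bool bright, bool light)
    {
        var buf = new System.Text.StringBuilder("\x1b[0;"); // reset any existing codes
        int lightAdjustment = light ? 60 : 0;                // "Light" shifts into the 90/100 ranges
        buf.Append(30 + lightAdjustment + foreColor);        // foreground: 30-37 (90-97 when light)
        buf.Append(';');
        buf.Append(40 + lightAdjustment + backColor);        // background: 40-47 (100-107 when light)
        if (bright)
        {
            buf.Append(";1");                                // bold / bright attribute
        }
        buf.Append('m');
        return buf.ToString();                               // e.g. "\x1b[0;37;41;1m" for the values above
    }
}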
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Linq;
using System.Xml;
using System.Xml.Serialization;
using AgenaTrader.API;
using AgenaTrader.Custom;
using AgenaTrader.Plugins;
using AgenaTrader.Helper;
/// <summary>
/// Version: 1.1.0
/// -------------------------------------------------------------------------
/// Simon Pucher 2016
/// -------------------------------------------------------------------------
/// Inspired by https://www.youtube.com/watch?v=Qj_6DFTNfjE
/// -------------------------------------------------------------------------
/// ****** Important ******
/// To compile this script without errors you also need access to the utility indicator, which provides the global source code elements used here.
/// You will find this script on GitHub: https://raw.githubusercontent.com/simonpucher/AgenaTrader/master/Utilities/GlobalUtilities_Utility.cs
/// -------------------------------------------------------------------------
/// Namespace holds all indicators and is required. Do not change it.
/// </summary>
namespace AgenaTrader.UserCode
{
[Description("Watch out for the lonely warrior behind enemy lines.")]
public class Lonely_Warrior_Indicator : UserIndicator
{
//input
private bool _showarrows = true;
private Color _plot0color = Const.DefaultIndicatorColor;
private int _plot0width = Const.DefaultLineWidth;
private DashStyle _plot0dashstyle = Const.DefaultIndicatorDashStyle;
private Color _plot1color = Const.DefaultIndicatorColor_GreyedOut;
private int _plot1width = Const.DefaultLineWidth;
private DashStyle _plot1dashstyle = Const.DefaultIndicatorDashStyle;
/// <summary>
/// This method is used to configure the indicator and is called once before any bar data is loaded.
/// </summary>
protected override void OnInit()
{
Add(new OutputDescriptor(new Pen(this.Plot0Color, this.Plot0Width), OutputSerieDrawStyle.Line, "Plot_Line"));
CalculateOnClosedBar = true;
IsOverlay = false;
IsAutoAdjustableScale = true;
//For backtesting reasons, the advanced mode needs at least two bars of history.
this.RequiredBarsCount = 20;
}
protected override void OnCalculate()
{
Bollinger bb = Bollinger(2, 20);
AddChartLine("Plot_Middle" + Time[0].ToString(), this.IsAutoAdjustableScale, 1, bb.Middle[1], 0, bb.Middle[0], this.Plot1Color, this.Dash1Style, this.Plot1Width);
AddChartLine("Plot_Low" + Time[0].ToString(), this.IsAutoAdjustableScale, 1, bb.Lower[1], 0, bb.Lower[0], this.Plot0Color, this.Dash0Style, this.Plot0Width);
AddChartLine("Plot_High" + Time[0].ToString(), this.IsAutoAdjustableScale, 1, bb.Upper[1], 0, bb.Upper[0], this.Plot0Color, this.Dash0Style, this.Plot0Width);
//Color the bar white unless it lies entirely outside the Bollinger Bands.
if (!(High[0] < bb.Lower[0] || Low[0] > bb.Upper[0]))
{
this.BarColor = Color.White;
}
//Trigger
double signal = 0;
if (High[1] < bb.Lower[1])
{
if (Low[0] > High[1] || High[0] > High[1])
{
if (ShowArrows)
{
AddChartArrowUp("ArrowLong_Entry" + +Bars[0].Time.Ticks, this.IsAutoAdjustableScale, 0, Bars[0].Low, Color.LightGreen);
}
signal = 1;
}
}
else if (Low[1] > bb.Upper[1])
{
if (Low[0] < Low[1] || High[0] < Low[1])
{
if (ShowArrows)
{
AddChartArrowDown("ArrowShort_Entry" + +Bars[0].Time.Ticks, this.IsAutoAdjustableScale, 0, Bars[0].High, Color.Red);
}
signal = -1;
}
}
PlotLine.Set(signal);
PlotColors[0][0] = this.Plot0Color;
OutputDescriptors[0].PenStyle = this.Dash0Style;
OutputDescriptors[0].Pen.Width = this.Plot0Width;
}
public override string ToString()
{
return "Lonely Warrior (I)";
}
public override string DisplayName
{
get
{
return "Lonely Warrior (I)";
}
}
#region Properties
[Browsable(false)]
[XmlIgnore()]
public DataSeries PlotLine
{
get { return Outputs[0]; }
}
/// <summary>
/// </summary>
[Description("If true then arrows are drawn on the chart.")]
[Category("Plots")]
[DisplayName("Show arrows")]
public bool ShowArrows
{
get { return _showarrows; }
set { _showarrows = value; }
}
/// <summary>
/// </summary>
[Description("Select Color for the indicator.")]
[Category("Plots")]
[DisplayName("Color")]
public Color Plot0Color
{
get { return _plot0color; }
set { _plot0color = value; }
}
// Serialize Color object
[Browsable(false)]
public string Plot0ColorSerialize
{
get { return SerializableColor.ToString(_plot0color); }
set { _plot0color = SerializableColor.FromString(value); }
}
/// <summary>
/// </summary>
[Description("Line width for indicator.")]
[Category("Plots")]
[DisplayName("Line width")]
public int Plot0Width
{
get { return _plot0width; }
set { _plot0width = Math.Max(1, value); }
}
/// <summary>
/// </summary>
[Description("DashStyle for indicator.")]
[Category("Plots")]
[DisplayName("DashStyle")]
public DashStyle Dash0Style
{
get { return _plot0dashstyle; }
set { _plot0dashstyle = value; }
}
/// <summary>
/// </summary>
[Description("Select color for the indicator.")]
[Category("Plots")]
[DisplayName("Color")]
public Color Plot1Color
{
get { return _plot1color; }
set { _plot1color = value; }
}
// Serialize Color object
[Browsable(false)]
public string Plot1ColorSerialize
{
get { return SerializableColor.ToString(_plot1color); }
set { _plot1color = SerializableColor.FromString(value); }
}
/// <summary>
/// </summary>
[Description("Line width for indicator.")]
[Category("Plots")]
[DisplayName("Line width")]
public int Plot1Width
{
get { return _plot1width; }
set { _plot1width = Math.Max(1, value); }
}
/// <summary>
/// </summary>
[Description("DashStyle for indicator.")]
[Category("Plots")]
[DisplayName("DashStyle")]
public DashStyle Dash1Style
{
get { return _plot1dashstyle; }
set { _plot1dashstyle = value; }
}
#endregion
}
}
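// --- Editor's sketch (not part of the indicator) ---
// The trigger rule from OnCalculate expressed as a pure function: +1 when the previous bar
// lies entirely below the lower Bollinger Band and the current bar trades back above the
// previous high, -1 for the mirrored short case, 0 otherwise. The type, method and parameter
// names are hypothetical; they only restate the comparisons used above.
internal static class LonelyWarriorSignal
{
    internal static int Evaluate(
        double prevHigh, double prevLow, double curHigh, double curLow,
        double prevLowerBand, double prevUpperBand)
    {
        if (prevHigh < prevLowerBand && (curLow > prevHigh || curHigh > prevHigh))
        {
            return 1;   // long: lonely bar below the band, price re-enters from below
        }
        if (prevLow > prevUpperBand && (curLow < prevLow || curHigh < prevLow))
        {
            return -1;  // short: lonely bar above the band, price re-enters from above
        }
        return 0;
    }
}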
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using NUnit.Framework;
using OpenMetaverse;
using OpenSim.Tests.Common;
using System;
using System.Globalization;
using System.Threading;
namespace OpenSim.Framework.Tests
{
[TestFixture]
public class MundaneFrameworkTests : OpenSimTestCase
{
private bool m_RegionSettingsOnSaveEventFired;
private bool m_RegionLightShareDataOnSaveEventFired;
[Test]
public void ChildAgentDataUpdate01()
{
// code coverage
ChildAgentDataUpdate cadu = new ChildAgentDataUpdate();
Assert.IsFalse(cadu.alwaysrun, "Default is false");
}
[Test]
public void AgentPositionTest01()
{
UUID AgentId1 = UUID.Random();
UUID SessionId1 = UUID.Random();
uint CircuitCode1 = uint.MinValue;
Vector3 Size1 = Vector3.UnitZ;
Vector3 Position1 = Vector3.UnitX;
Vector3 LeftAxis1 = Vector3.UnitY;
Vector3 UpAxis1 = Vector3.UnitZ;
Vector3 AtAxis1 = Vector3.UnitX;
ulong RegionHandle1 = ulong.MinValue;
byte[] Throttles1 = new byte[] { 0, 1, 0 };
Vector3 Velocity1 = Vector3.Zero;
float Far1 = 256;
bool ChangedGrid1 = false;
Vector3 Center1 = Vector3.Zero;
AgentPosition position1 = new AgentPosition();
position1.AgentID = AgentId1;
position1.SessionID = SessionId1;
position1.CircuitCode = CircuitCode1;
position1.Size = Size1;
position1.Position = Position1;
position1.LeftAxis = LeftAxis1;
position1.UpAxis = UpAxis1;
position1.AtAxis = AtAxis1;
position1.RegionHandle = RegionHandle1;
position1.Throttles = Throttles1;
position1.Velocity = Velocity1;
position1.Far = Far1;
position1.ChangedGrid = ChangedGrid1;
position1.Center = Center1;
ChildAgentDataUpdate cadu = new ChildAgentDataUpdate();
cadu.AgentID = AgentId1.Guid;
cadu.ActiveGroupID = UUID.Zero.Guid;
cadu.throttles = Throttles1;
cadu.drawdistance = Far1;
cadu.Position = Position1;
cadu.Velocity = Velocity1;
cadu.regionHandle = RegionHandle1;
cadu.cameraPosition = Center1;
cadu.AVHeight = Size1.Z;
AgentPosition position2 = new AgentPosition();
position2.CopyFrom(cadu, position1.SessionID);
Assert.IsTrue(
position2.AgentID == position1.AgentID
&& position2.Size == position1.Size
&& position2.Position == position1.Position
&& position2.Velocity == position1.Velocity
&& position2.Center == position1.Center
&& position2.RegionHandle == position1.RegionHandle
&& position2.Far == position1.Far
, "Copy From ChildAgentDataUpdate failed");
position2 = new AgentPosition();
Assert.IsFalse(position2.AgentID == position1.AgentID, "Test Error, position2 should be a blank uninitialized AgentPosition");
position2.Unpack(position1.Pack(), null);
Assert.IsTrue(position2.AgentID == position1.AgentID, "Agent ID didn't unpack the same way it packed");
Assert.IsTrue(position2.Position == position1.Position, "Position didn't unpack the same way it packed");
Assert.IsTrue(position2.Velocity == position1.Velocity, "Velocity didn't unpack the same way it packed");
Assert.IsTrue(position2.SessionID == position1.SessionID, "SessionID didn't unpack the same way it packed");
Assert.IsTrue(position2.CircuitCode == position1.CircuitCode, "CircuitCode didn't unpack the same way it packed");
Assert.IsTrue(position2.LeftAxis == position1.LeftAxis, "LeftAxis didn't unpack the same way it packed");
Assert.IsTrue(position2.UpAxis == position1.UpAxis, "UpAxis didn't unpack the same way it packed");
Assert.IsTrue(position2.AtAxis == position1.AtAxis, "AtAxis didn't unpack the same way it packed");
Assert.IsTrue(position2.RegionHandle == position1.RegionHandle, "RegionHandle didn't unpack the same way it packed");
Assert.IsTrue(position2.ChangedGrid == position1.ChangedGrid, "ChangedGrid didn't unpack the same way it packed");
Assert.IsTrue(position2.Center == position1.Center, "Center didn't unpack the same way it packed");
Assert.IsTrue(position2.Size == position1.Size, "Size didn't unpack the same way it packed");
}
[Test]
public void RegionSettingsTest01()
{
RegionSettings settings = new RegionSettings();
settings.OnSave += RegionSaveFired;
settings.Save();
settings.OnSave -= RegionSaveFired;
// string str = settings.LoadedCreationDate;
// int dt = settings.LoadedCreationDateTime;
// string id = settings.LoadedCreationID;
// string time = settings.LoadedCreationTime;
Assert.That(m_RegionSettingsOnSaveEventFired, "RegionSettings Save Event didn't Fire");
}
public void RegionSaveFired(RegionSettings settings)
{
m_RegionSettingsOnSaveEventFired = true;
}
[Test]
public void InventoryItemBaseConstructorTest01()
{
InventoryItemBase b1 = new InventoryItemBase();
Assert.That(b1.ID == UUID.Zero, "void constructor should create an inventory item with ID = UUID.Zero");
Assert.That(b1.Owner == UUID.Zero, "void constructor should create an inventory item with Owner = UUID.Zero");
UUID ItemID = UUID.Random();
UUID OwnerID = UUID.Random();
InventoryItemBase b2 = new InventoryItemBase(ItemID);
Assert.That(b2.ID == ItemID, "ID constructor should create an inventory item with ID = ItemID");
Assert.That(b2.Owner == UUID.Zero, "ID constructor should create an inventory item with Owner = UUID.Zero");
InventoryItemBase b3 = new InventoryItemBase(ItemID, OwnerID);
Assert.That(b3.ID == ItemID, "ID,OwnerID constructor should create an inventory item with ID = ItemID");
Assert.That(b3.Owner == OwnerID, "ID,OwnerID constructor should create an inventory item with Owner = OwnerID");
}
[Test]
public void AssetMetaDataNonNullContentTypeTest01()
{
AssetMetadata assetMetadata = new AssetMetadata();
assetMetadata.ContentType = "image/jp2";
Assert.That(assetMetadata.Type == (sbyte)AssetType.Texture, "Content type should be AssetType.Texture");
Assert.That(assetMetadata.ContentType == "image/jp2", "Text of content type should be image/jp2");
UUID rndID = UUID.Random();
assetMetadata.ID = rndID.ToString();
Assert.That(assetMetadata.ID.ToLower() == rndID.ToString().ToLower(), "assetMetadata.ID Setter/Getter not Consistent");
DateTime fixedTime = DateTime.Now;
assetMetadata.CreationDate = fixedTime;
}
[Test]
public void RegionLightShareDataCloneSaveTest01()
{
RegionLightShareData rlsd = new RegionLightShareData();
rlsd.OnSave += RegionLightShareDataSaveFired;
rlsd.Save();
rlsd.OnSave -= RegionLightShareDataSaveFired;
Assert.IsTrue(m_RegionLightShareDataOnSaveEventFired, "OnSave Event Never Fired");
object o = rlsd.Clone();
RegionLightShareData dupe = (RegionLightShareData)o;
Assert.IsTrue(rlsd.sceneGamma == dupe.sceneGamma, "Memberwise Clone of RegionLightShareData failed");
}
public void RegionLightShareDataSaveFired(RegionLightShareData settings)
{
m_RegionLightShareDataOnSaveEventFired = true;
}
[Test]
public void EstateSettingsMundateTests()
{
EstateSettings es = new EstateSettings();
es.AddBan(null);
UUID bannedUserId = UUID.Random();
es.AddBan(new EstateBan()
{
BannedHostAddress = string.Empty,
BannedHostIPMask = string.Empty,
BannedHostNameMask = string.Empty,
BannedUserID = bannedUserId
}
);
Assert.IsTrue(es.IsBanned(bannedUserId), "User Should be banned but is not.");
Assert.IsFalse(es.IsBanned(UUID.Zero), "User Should not be banned but is.");
es.RemoveBan(bannedUserId);
Assert.IsFalse(es.IsBanned(bannedUserId), "User Should not be banned but is.");
es.AddEstateManager(UUID.Zero);
es.AddEstateManager(bannedUserId);
Assert.IsTrue(es.IsEstateManagerOrOwner(bannedUserId), "bannedUserId should be EstateManager but isn't.");
es.RemoveEstateManager(bannedUserId);
Assert.IsFalse(es.IsEstateManagerOrOwner(bannedUserId), "bannedUserID is estateManager but shouldn't be");
Assert.IsFalse(es.HasAccess(bannedUserId), "bannedUserID has access but shouldn't");
es.AddEstateUser(bannedUserId);
Assert.IsTrue(es.HasAccess(bannedUserId), "bannedUserID doesn't have access but should");
es.RemoveEstateUser(bannedUserId);
es.AddEstateManager(bannedUserId);
Assert.IsTrue(es.HasAccess(bannedUserId), "bannedUserID doesn't have access but should");
Assert.That(es.EstateGroups.Length == 0, "No Estate Groups Added.. so the array should be 0 length");
es.AddEstateGroup(bannedUserId);
Assert.That(es.EstateGroups.Length == 1, "1 Estate Groups Added.. so the array should be 1 length");
Assert.That(es.EstateGroups[0] == bannedUserId, "User ID should be in EstateGroups");
}
[Test]
public void InventoryFolderBaseConstructorTest01()
{
UUID uuid1 = UUID.Random();
UUID uuid2 = UUID.Random();
InventoryFolderBase fld = new InventoryFolderBase(uuid1);
Assert.That(fld.ID == uuid1, "ID constructor failed to save value in ID field.");
fld = new InventoryFolderBase(uuid1, uuid2);
Assert.That(fld.ID == uuid1, "ID,Owner constructor failed to save value in ID field.");
Assert.That(fld.Owner == uuid2, "ID,Owner constructor failed to save value in ID field.");
}
[Test]
public void AsssetBaseConstructorTest01()
{
AssetBase abase = new AssetBase();
Assert.IsNotNull(abase.Metadata, "void constructor of AssetBase should have created a MetaData element but didn't.");
UUID itemID = UUID.Random();
UUID creatorID = UUID.Random();
abase = new AssetBase(itemID.ToString(), "test item", (sbyte)AssetType.Texture, creatorID.ToString());
Assert.IsNotNull(abase.Metadata, "string,string,sbyte,string constructor of AssetBase should have created a MetaData element but didn't.");
Assert.That(abase.ID == itemID.ToString(), "string,string,sbyte,string constructor failed to set ID property");
Assert.That(abase.Metadata.CreatorID == creatorID.ToString(), "string,string,sbyte,string constructor failed to set Creator ID");
abase = new AssetBase(itemID.ToString(), "test item", -1, creatorID.ToString());
Assert.IsNotNull(abase.Metadata, "string,string,sbyte,string constructor of AssetBase with unknown type should have created a MetaData element but didn't.");
Assert.That(abase.Metadata.Type == -1, "Unknown Type passed to string,string,sbyte,string constructor and was a known type when it came out again");
AssetMetadata metts = new AssetMetadata();
metts.FullID = itemID;
metts.ID = string.Empty;
metts.Name = "test item";
abase.Metadata = metts;
Assert.That(abase.ToString() == itemID.ToString(), "ToString is overridden to be fullID.ToString()");
Assert.That(abase.ID == itemID.ToString(), "ID should be MetaData.FullID.ToString() when string.empty or null is provided to the ID property");
}
[Test]
public void CultureSetCultureTest01()
{
CultureInfo ci = new CultureInfo("en-US", false);
Culture.SetCurrentCulture();
Assert.That(Thread.CurrentThread.CurrentCulture.Name == ci.Name, "SetCurrentCulture failed to set thread culture to en-US");
}
}
}
| |
using System;
namespace Loon.Core.Geom {
public class Line : Shape {
private const long serialVersionUID = 1L;
private Vector2f start;
private Vector2f end;
private Vector2f vec;
private Vector2f loc;
private Vector2f closest;
public Line(float x, float y, bool inner, bool outer) :this(0, 0, x, y){
}
public Line(float x, float y):this(x, y, true, true){
}
public Line(Point p1, Point p2): this(p1.x, p1.y, p2.x, p2.y) {
}
public Line(float x1, float y1, float x2, float y2):this(new Vector2f(x1, y1), new Vector2f(x2, y2)) {
}
public Line(float x1, float y1, float dx, float dy, bool dummy):this(new Vector2f(x1, y1), new Vector2f(x1 + dx, y1 + dy)) {
}
public Line(float[] start_0, float[] end_1):base() {
this.loc = new Vector2f(0, 0);
this.closest = new Vector2f(0, 0);
Set(start_0, end_1);
}
public Line(Vector2f start_0, Vector2f end_1)
: base()
{
this.loc = new Vector2f(0, 0);
this.closest = new Vector2f(0, 0);
Set(start_0, end_1);
}
public void Set(float[] start_0, float[] end_1) {
Set(start_0[0], start_0[1], end_1[0], end_1[1]);
}
public Vector2f GetStart() {
return start;
}
public Vector2f GetEnd() {
return end;
}
public override float Length() {
return vec.Len();
}
public float LengthSquared() {
return vec.LengthSquared();
}
public void Set(Vector2f start_0, Vector2f end_1) {
base.pointsDirty = true;
if (this.start == null) {
this.start = new Vector2f();
}
this.start.Set(start_0);
if (this.end == null) {
this.end = new Vector2f();
}
this.end.Set(end_1);
vec = new Vector2f(end_1);
vec.Sub(start_0);
}
public void Set(float sx, float sy, float ex, float ey) {
base.pointsDirty = true;
start.Set(sx, sy);
end.Set(ex, ey);
float dx = (ex - sx);
float dy = (ey - sy);
vec.Set(dx, dy);
}
public float GetDX() {
return end.GetX() - start.GetX();
}
public float GetDY() {
return end.GetY() - start.GetY();
}
public override float GetX() {
return GetX1();
}
public override float GetY() {
return GetY1();
}
public float GetX1() {
return start.GetX();
}
public float GetY1() {
return start.GetY();
}
public float GetX2() {
return end.GetX();
}
public float GetY2() {
return end.GetY();
}
public float Distance(Vector2f point) {
return (float) Math.Sqrt(DistanceSquared(point));
}
public bool On(Vector2f point) {
GetClosestPoint(point, closest);
return point.Equals(closest);
}
public float DistanceSquared(Vector2f point) {
GetClosestPoint(point, closest);
closest.Sub(point);
float result = closest.LengthSquared();
return result;
}
public void GetClosestPoint(Vector2f point, Vector2f result) {
loc.Set(point);
loc.Sub(start);
float projDistance = vec.Dot(loc);
projDistance /= vec.LengthSquared();
if (projDistance < 0) {
result.Set(start);
return;
}
if (projDistance > 1) {
result.Set(end);
return;
}
result.x = start.GetX() + projDistance * vec.GetX();
result.y = start.GetY() + projDistance * vec.GetY();
}
public Vector2f Intersect(Line other) {
return Intersect(other, false);
}
public Vector2f Intersect(Line other, bool limit) {
Vector2f temp = new Vector2f();
if (!Intersect(other, limit, temp)) {
return null;
}
return temp;
}
public bool Intersect(Line other, bool limit, Vector2f result) {
float dx1 = end.GetX() - start.GetX();
float dx2 = other.end.GetX() - other.start.GetX();
float dy1 = end.GetY() - start.GetY();
float dy2 = other.end.GetY() - other.start.GetY();
float denom = (dy2 * dx1) - (dx2 * dy1);
if (denom == 0) {
return false;
}
float ua = (dx2 * (start.GetY() - other.start.GetY()))
- (dy2 * (start.GetX() - other.start.GetX()));
ua /= denom;
float ub = (dx1 * (start.GetY() - other.start.GetY()))
- (dy1 * (start.GetX() - other.start.GetX()));
ub /= denom;
if ((limit) && ((ua < 0) || (ua > 1) || (ub < 0) || (ub > 1))) {
return false;
}
float u = ua;
float ix = start.GetX() + (u * (end.GetX() - start.GetX()));
float iy = start.GetY() + (u * (end.GetY() - start.GetY()));
result.Set(ix, iy);
return true;
}
protected internal override void CreatePoints() {
points = new float[4];
points[0] = GetX1();
points[1] = GetY1();
points[2] = GetX2();
points[3] = GetY2();
}
public float PtSegDistSq(Point pt) {
return PtSegDistSq(GetX1(), GetY1(), GetX2(), GetY2(), pt.GetX(),
pt.GetY());
}
public float PtSegDistSq(float px, float py) {
return PtSegDistSq(GetX1(), GetY1(), GetX2(), GetY2(), px, py);
}
public static float PtSegDist(float x1, float y1, float x2, float y2,
float px, float py) {
return (float) Math.Sqrt(PtSegDistSq(x1, y1, x2, y2, px, py));
}
public static float PtSegDistSq(float x1, float y1, float x2, float y2,
float px, float py) {
x2 -= x1;
y2 -= y1;
px -= x1;
py -= y1;
float dotprod = px * x2 + py * y2;
float projlenSq;
if (dotprod <= 0.0d) {
projlenSq = 0.0f;
} else {
px = x2 - px;
py = y2 - py;
dotprod = px * x2 + py * y2;
if (dotprod <= 0.0d) {
projlenSq = 0.0f;
} else {
projlenSq = dotprod * dotprod / (x2 * x2 + y2 * y2);
}
}
float lenSq = px * px + py * py - projlenSq;
if (lenSq < 0) {
lenSq = 0;
}
return lenSq;
}
public static float PtLineDist(float x1, float y1, float x2, float y2,
float px, float py) {
return (float) Math.Sqrt(PtLineDistSq(x1, y1, x2, y2, px, py));
}
public float PtLineDist(Point pt) {
return PtLineDist(GetX1(), GetY1(), GetX2(), GetY2(), pt.GetX(),
pt.GetY());
}
public float PtLineDistSq(float px, float py) {
return PtLineDistSq(GetX1(), GetY1(), GetX2(), GetY2(), px, py);
}
public float PtLineDistSq(Point pt) {
return PtLineDistSq(GetX1(), GetY1(), GetX2(), GetY2(), pt.GetX(),
pt.GetY());
}
public static float PtLineDistSq(float x1, float y1, float x2, float y2,
float px, float py) {
x2 -= x1;
y2 -= y1;
px -= x1;
py -= y1;
float dotprod = px * x2 + py * y2;
float projlenSq = dotprod * dotprod / (x2 * x2 + y2 * y2);
float lenSq = px * px + py * py - projlenSq;
if (lenSq < 0) {
lenSq = 0;
}
return lenSq;
}
public override Shape Transform(Matrix transform) {
float[] temp = new float[4];
CreatePoints();
transform.Transform(points, 0, temp, 0, 2);
return new Line(temp[0], temp[1], temp[2], temp[3]);
}
public override bool Closed() {
return false;
}
}
}
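// --- Editor's sketch (not part of the library) ---
// Line.Intersect uses the standard parametric form: with direction vectors d1 = end - start
// and d2 = other.end - other.start, denom = d2.y * d1.x - d2.x * d1.y is zero for parallel
// segments, and the intersection point is start + ua * d1. The demo below restates that
// arithmetic on plain floats; the type and method names are hypothetical.
internal static class LineIntersectDemo
{
    internal static bool TryIntersect(
        float ax1, float ay1, float ax2, float ay2,
        float bx1, float by1, float bx2, float by2,
        out float ix, out float iy)
    {
        float dx1 = ax2 - ax1, dy1 = ay2 - ay1;
        float dx2 = bx2 - bx1, dy2 = by2 - by1;
        float denom = dy2 * dx1 - dx2 * dy1;   // zero when the segments are parallel
        ix = iy = 0f;
        if (denom == 0f)
        {
            return false;
        }
        float ua = (dx2 * (ay1 - by1) - dy2 * (ax1 - bx1)) / denom;
        ix = ax1 + ua * dx1;                   // e.g. (0,0)-(2,2) and (0,2)-(2,0) intersect at (1,1)
        iy = ay1 + ua * dy1;
        return true;
    }
}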
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Linq.Expressions;
using Xunit;
namespace System.Linq.Tests
{
public class MaxTests : EnumerableBasedTests
{
[Fact]
public void NullInt32Source()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<int>)null).Max());
}
[Fact]
public void EmptyInt32()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<int>().AsQueryable().Max());
}
[Fact]
public void Int32MaxRepeated()
{
int[] source = { -6, 0, -9, 0, -10, 0 };
Assert.Equal(0, source.AsQueryable().Max());
}
[Fact]
public void NullInt64Source()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<long>)null).Max());
}
[Fact]
public void EmptyInt64()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<long>().AsQueryable().Max());
}
[Fact]
public void Int64MaxRepeated()
{
long[] source = { 6, 50, 9, 50, 10, 50 };
Assert.Equal(50, source.AsQueryable().Max());
}
[Fact]
public void EmptySingle()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<float>().AsQueryable().Max());
}
[Fact]
public void Single_MaxRepeated()
{
float[] source = { -5.5f, float.PositiveInfinity, 9.9f, float.PositiveInfinity };
Assert.True(float.IsPositiveInfinity(source.AsQueryable().Max()));
}
[Fact]
public void NullSingleSource()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<float>)null).Max());
}
[Fact]
public void NullDoubleSource()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<double>)null).Max());
}
[Fact]
public void EmptyDouble()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<double>().AsQueryable().Max());
}
[Fact]
public void DoubleMaximumRepeated()
{
double[] source = { -5.5, double.PositiveInfinity, 9.9, double.PositiveInfinity };
Assert.True(double.IsPositiveInfinity(source.AsQueryable().Max()));
}
[Fact]
public void NullDecimalSource()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal>)null).Max());
}
[Fact]
public void EmptyDecimal()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<decimal>().AsQueryable().Max());
}
[Fact]
public void DecimalMaximumRepeated()
{
decimal[] source = { -5.5m, 0m, 9.9m, -5.5m, 9.9m };
Assert.Equal(9.9m, source.AsQueryable().Max());
}
[Fact]
public void NullNullableInt32Source()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<int?>)null).Max());
}
[Fact]
public void EmptyNullableInt32()
{
Assert.Null(Enumerable.Empty<int?>().AsQueryable().Max());
}
[Fact]
public void NullableInt32MaxRepeated()
{
int?[] source = { 6, null, null, 100, 9, 100, 10, 100 };
Assert.Equal(100, source.AsQueryable().Max());
}
[Fact]
public void NullNullableInt64Source()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<long?>)null).Max());
}
[Fact]
public void EmptyNullableInt64()
{
Assert.Null(Enumerable.Empty<long?>().AsQueryable().Max(x => x));
}
[Fact]
public void NullableInt64MaximumRepeated()
{
long?[] source = { -6, null, null, 0, -9, 0, -10, -30 };
Assert.Equal(0, source.AsQueryable().Max());
}
[Fact]
public void EmptyNullableSingle()
{
Assert.Null(Enumerable.Empty<float?>().AsQueryable().Max());
}
[Fact]
public void NullableSingleMaxRepeated()
{
float?[] source = { -6.4f, null, null, -0.5f, -9.4f, -0.5f, -10.9f, -0.5f };
Assert.Equal(-0.5f, source.AsQueryable().Max());
}
[Fact]
public void NullNullableDoubleSource()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<double?>)null).Max());
}
[Fact]
public void EmptyNullableDouble()
{
Assert.Null(Enumerable.Empty<double?>().AsQueryable().Max());
}
[Fact]
public void NullNullableDecimalSource()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal?>)null).Max());
}
[Fact]
public void EmptyNullableDecimal()
{
Assert.Null(Enumerable.Empty<decimal?>().AsQueryable().Max());
}
[Fact]
public void NullableDecimalMaximumRepeated()
{
decimal?[] source = { 6.4m, null, null, decimal.MaxValue, 9.4m, decimal.MaxValue, 10.9m, decimal.MaxValue };
Assert.Equal(decimal.MaxValue, source.AsQueryable().Max());
}
[Fact]
public void EmptyDateTime()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<DateTime>().AsQueryable().Max());
}
[Fact]
public void NullDateTimeSource()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<DateTime>)null).Max());
}
[Fact]
public void NullStringSource()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<string>)null).Max());
}
[Fact]
public void EmptyString()
{
Assert.Null(Enumerable.Empty<string>().AsQueryable().Max());
}
[Fact]
public void StringMaximumRepeated()
{
string[] source = { "ooo", "ccc", "ccc", "ooo", "ooo", "nnn" };
Assert.Equal("ooo", source.AsQueryable().Max());
}
[Fact]
public void EmptyInt32WithSelector()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<int>().AsQueryable().Max(x => x));
}
[Fact]
public void NullInt32SourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<int>)null).Max(i => i));
}
[Fact]
public void Int32SourceWithNullSelector()
{
Expression<Func<int, int>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int>().AsQueryable().Max(selector));
}
[Fact]
public void MaxInt32WithSelectorAccessingProperty()
{
var source = new[]{
new { name="Tim", num=10 },
new { name="John", num=-105 },
new { name="Bob", num=30 }
};
Assert.Equal(30, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void EmptyInt64WithSelector()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<long>().AsQueryable().Max(x => x));
}
[Fact]
public void NullInt64SourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<long>)null).Max(i => i));
}
[Fact]
public void Int64SourceWithNullSelector()
{
Expression<Func<long, long>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long>().AsQueryable().Max(selector));
}
[Fact]
public void MaxInt64WithSelectorAccessingProperty()
{
var source = new[]{
new { name="Tim", num=10L },
new { name="John", num=-105L },
new { name="Bob", num=long.MaxValue }
};
Assert.Equal(long.MaxValue, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void MaxSingleWithSelectorAccessingProperty()
{
var source = new []
{
new { name = "Tim", num = 40.5f },
new { name = "John", num = -10.25f },
new { name = "Bob", num = 100.45f }
};
Assert.Equal(100.45f, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void NullSingleSourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<float>)null).Max(i => i));
}
[Fact]
public void SingleSourceWithNullSelector()
{
Expression<Func<float, float>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float>().AsQueryable().Max(selector));
}
[Fact]
public void EmptySingleWithSelector()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<float>().AsQueryable().Max(x => x));
}
[Fact]
public void EmptyDoubleWithSelector()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<double>().AsQueryable().Max(x => x));
}
[Fact]
public void NullDoubleSourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<double>)null).Max(i => i));
}
[Fact]
public void DoubleSourceWithNullSelector()
{
Expression<Func<double, double>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double>().AsQueryable().Max(selector));
}
[Fact]
public void MaxDoubleWithSelectorAccessingField()
{
var source = new[]{
new { name="Tim", num=40.5 },
new { name="John", num=-10.25 },
new { name="Bob", num=100.45 }
};
Assert.Equal(100.45, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void EmptyDecimalWithSelector()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<decimal>().AsQueryable().Max(x => x));
}
[Fact]
public void NullDecimalSourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal>)null).Max(i => i));
}
[Fact]
public void DecimalSourceWithNullSelector()
{
Expression<Func<decimal, decimal>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal>().AsQueryable().Max(selector));
}
[Fact]
public void MaxDecimalWithSelectorAccessingProperty()
{
var source = new[]{
new { name="Tim", num=420.5m },
new { name="John", num=900.25m },
new { name="Bob", num=10.45m }
};
Assert.Equal(900.25m, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void EmptyNullableInt32WithSelector()
{
Assert.Null(Enumerable.Empty<int?>().AsQueryable().Max(x => x));
}
[Fact]
public void NullNullableInt32SourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<int?>)null).Max(i => i));
}
[Fact]
public void NullableInt32SourceWithNullSelector()
{
Expression<Func<int?, int?>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int?>().AsQueryable().Max(selector));
}
[Fact]
public void MaxNullableInt32WithSelectorAccessingField()
{
var source = new[]{
new { name="Tim", num=(int?)10 },
new { name="John", num=(int?)-105 },
new { name="Bob", num=(int?)null }
};
Assert.Equal(10, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void EmptyNullableInt64WithSelector()
{
Assert.Null(Enumerable.Empty<long?>().AsQueryable().Max(x => x));
}
[Fact]
public void NullNullableInt64SourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<long?>)null).Max(i => i));
}
[Fact]
public void NullableInt64SourceWithNullSelector()
{
Expression<Func<long?, long?>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long?>().AsQueryable().Max(selector));
}
[Fact]
public void MaxNullableInt64WithSelectorAccessingField()
{
var source = new[]{
new {name="Tim", num=default(long?) },
new {name="John", num=(long?)-105L },
new {name="Bob", num=(long?)long.MaxValue }
};
Assert.Equal(long.MaxValue, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void EmptyNullableSingleWithSelector()
{
Assert.Null(Enumerable.Empty<float?>().AsQueryable().Max(x => x));
}
[Fact]
public void NullNullableSingleSourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<float?>)null).Max(i => i));
}
[Fact]
public void NullableSingleSourceWithNullSelector()
{
Expression<Func<float?, float?>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float?>().AsQueryable().Max(selector));
}
[Fact]
public void MaxNullableSingleWithSelectorAccessingProperty()
{
var source = new[]
{
new { name="Tim", num=(float?)40.5f },
new { name="John", num=(float?)null },
new { name="Bob", num=(float?)100.45f }
};
Assert.Equal(100.45f, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void EmptyNullableDoubleWithSelector()
{
Assert.Null(Enumerable.Empty<double?>().AsQueryable().Max(x => x));
}
[Fact]
public void NullNullableDoubleSourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<double?>)null).Max(i => i));
}
[Fact]
public void NullableDoubleSourceWithNullSelector()
{
Expression<Func<double?, double?>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double?>().AsQueryable().Max(selector));
}
[Fact]
public void MaxNullableDoubleWithSelectorAccessingProperty()
{
var source = new []{
new { name = "Tim", num = (double?)40.5},
new { name = "John", num = default(double?)},
new { name = "Bob", num = (double?)100.45}
};
Assert.Equal(100.45, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void EmptyNullableDecimalWithSelector()
{
Assert.Null(Enumerable.Empty<decimal?>().AsQueryable().Max(x => x));
}
[Fact]
public void NullNullableDecimalSourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal?>)null).Max(i => i));
}
[Fact]
public void NullableDecimalSourceWithNullSelector()
{
Expression<Func<decimal?, decimal?>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal?>().AsQueryable().Max(selector));
}
[Fact]
public void MaxNullableDecimalWithSelectorAccessingProperty()
{
var source = new[] {
new { name="Tim", num=(decimal?)420.5m },
new { name="John", num=default(decimal?) },
new { name="Bob", num=(decimal?)10.45m }
};
Assert.Equal(420.5m, source.AsQueryable().Max(e => e.num));
}
[Fact]
public void EmptyNullableDateTimeWithSelector()
{
Assert.Null(Enumerable.Empty<DateTime?>().AsQueryable().Max(x => x));
}
[Fact]
public void NullNullableDateTimeSourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<DateTime?>)null).Max(i => i));
}
[Fact]
public void NullableDateTimeSourceWithNullSelector()
{
Expression<Func<DateTime?, DateTime?>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<DateTime?>().AsQueryable().Max(selector));
}
[Fact]
public void EmptyStringSourceWithSelector()
{
Assert.Null(Enumerable.Empty<string>().AsQueryable().Max(x => x));
}
[Fact]
public void NullStringSourceWithSelector()
{
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<string>)null).Max(i => i));
}
[Fact]
public void StringSourceWithNullSelector()
{
Expression<Func<string, string>> selector = null;
AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<string>().AsQueryable().Max(selector));
}
[Fact]
public void MaxStringWithSelectorAccessingProperty()
{
var source = new[]{
new { name="Tim", num=420.5m },
new { name="John", num=900.25m },
new { name="Bob", num=10.45m }
};
Assert.Equal("Tim", source.AsQueryable().Max(e => e.name));
}
[Fact]
public void EmptyBoolean()
{
Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<bool>().AsQueryable().Max());
}
[Fact]
public void Max1()
{
var val = (new int[] { 0, 2, 1 }).AsQueryable().Max();
Assert.Equal(2, val);
}
[Fact]
public void Max2()
{
var val = (new int[] { 0, 2, 1 }).AsQueryable().Max(n => n);
Assert.Equal(2, val);
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Sql
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// SyncMembersOperations operations.
/// </summary>
public partial interface ISyncMembersOperations
{
/// <summary>
/// Gets a sync member.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<SyncMember>> GetWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Creates or updates a sync member.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='parameters'>
/// The requested sync member resource state.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<SyncMember>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, SyncMember parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes a sync member.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates an existing sync member.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='parameters'>
/// The requested sync member resource state.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<SyncMember>> UpdateWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, SyncMember parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists sync members in the given sync group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<SyncMember>>> ListBySyncGroupWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets a sync member database schema.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<SyncFullSchemaProperties>>> ListMemberSchemasWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Refreshes a sync member database schema.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> RefreshMemberSchemaWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Creates or updates a sync member.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='parameters'>
/// The requested sync member resource state.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<SyncMember>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, SyncMember parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes a sync member.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates an existing sync member.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='parameters'>
/// The requested sync member resource state.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<SyncMember>> BeginUpdateWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, SyncMember parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Refreshes a sync member database schema.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can
/// obtain this value from the Azure Resource Manager API or the
/// portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group on which the sync member is hosted.
/// </param>
/// <param name='syncMemberName'>
/// The name of the sync member.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> BeginRefreshMemberSchemaWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string syncMemberName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists sync members in the given sync group.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<SyncMember>>> ListBySyncGroupNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets a sync member database schema.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<SyncFullSchemaProperties>>> ListMemberSchemasNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
}
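// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated client): it shows how a
// caller can page through the ListBySyncGroup / ListBySyncGroupNext pair
// declared above. The composition is an assumption; only the interface members
// above and the standard Microsoft.Rest.Azure paging types (IPage<T>,
// AzureOperationResponse<T>) are used. Response disposal is omitted for brevity.
namespace Microsoft.Azure.Management.Sql
{
    using Microsoft.Rest.Azure;
    using Models;
    using System.Collections.Generic;
    using System.Threading.Tasks;
    internal static class SyncMembersPagingExample
    {
        /// <summary>
        /// Collects every sync member in a sync group by following NextPageLink
        /// until the service stops returning a continuation token.
        /// </summary>
        internal static async Task<List<SyncMember>> ListAllSyncMembersAsync(ISyncMembersOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
        {
            var allMembers = new List<SyncMember>();
            AzureOperationResponse<IPage<SyncMember>> response = await operations.ListBySyncGroupWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName);
            IPage<SyncMember> page = response.Body;
            allMembers.AddRange(page);
            while (!string.IsNullOrEmpty(page.NextPageLink))
            {
                response = await operations.ListBySyncGroupNextWithHttpMessagesAsync(page.NextPageLink);
                page = response.Body;
                allMembers.AddRange(page);
            }
            return allMembers;
        }
    }
}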
| |
/*
* Copyright 2013 ThirdMotion, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @class strange.extensions.context.impl.MVCSContext
*
* The recommended Context for getting the most out of StrangeIoC.
*
* By extending this Context, you get the entire
* all-singing/all-dancing version of Strange, as it was shipped from the
* warehouse and ready for you to map your dependencies.
*
* As the name suggests, MVCSContext provides structure for
* app development using the classic <a href="http://en.wikipedia.org/wiki/Model%E2%80%93view%E2%80%93controller">MVC (Model-View-Controller)</a>
* design pattern, and adds 'S' (Service) for asynchronous calls outside
* the application. Strange is highly modular, so you needn't use
* MVCSContext if you don't want to (you can extend Context or CrossContext directly)
* but MVCS is a highly proven design strategy and MVCSContext is by far the easiest
* way to get familiar with what Strange has to offer.
*
* The parts:
* <ul>
* <li>contextView</li>
*
* The GameObject at the top of your display hierarchy. Attach a subclass of
* ContextView to a GameObject, then instantiate a subclass of MVCSContext
* to start the app.
*
* Example:
public class MyProjectRoot : ContextView
{
void Awake()
{
context = new MyContext(this); //Extends MVCSContext
}
}
*
* The contextView is automatically injected into all Mediators
* and available for injection into commands like so:
[Inject(ContextKeys.CONTEXT_VIEW)]
public GameObject contextView{get;set;}
* It is strongly advised that the contextView NOT be injected into
* Views, Models or Services.
*
* <li>injectionBinder</li>
*
* Maps dependencies to concrete classes or values.
*
* Examples:
injectionBinder.Bind<ISpaceship>().To<TieFighter>(); //Injects a new TieFighter wherever an ISpaceship is requested
injectionBinder.Bind<ISpaceship>().To<Starship>().ToName(Ships.ENTERPRISE); //Injects a Starship wherever an ISpaceship is requested with the Name qualifier Enterprise
injectionBinder.Bind<ITool>().To<SonicScrewdriver>().ToSingleton(); //Injects SonicScrewdriver as a Singleton wherever an ITool is requested
injectionBinder.Bind<IMoonbase>().ToValue(new Alpha()); //Injects the provided instance wherever IMoonbase is requested
 injectionBinder.Bind<ISpaceship>().Bind<ITimeShip>().To<Tardis>(); //Injects a new Tardis wherever EITHER ISpaceship or ITimeShip is requested.
* `injectionBinder` is automatically injected into all commands and may
* be injected elsewhere with:
[Inject]
public IInjectionBinder injectionBinder{ get; set;}
* <li>dispatcher</li>
*
* The event bus shared across the context. Informs listeners
* and triggers commands.
*
* `dispatcher` is injected into all EventMediators, EventCommands
* and EventSequenceCommands, and may be injected elsewhere with:
[Inject(ContextKeys.CONTEXT_DISPATCHER)]
public IEventDispatcher dispatcher{ get; set;}
* For examples, see IEventDispatcher. Generally you don't map the dispatcher's
* events to methods inside the Context. Rather, you map Commands and Sequences.
* Read on!
*
* <li>crossContextDispatcher</li>
*
* A second event bus for sending events between contexts. It
* should only be accessed from Commands or SequenceCommands,
* into which it may be injected by declaring the dependency:
[Inject(ContextKeys.CROSS_CONTEXT_DISPATCHER)]
public IEventDispatcher dispatcher{ get; set;}
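 * Once injected, cross-context events are dispatched exactly like local ones.
 * For example (a sketch; GameEvent.LEVEL_COMPLETE is an assumed app-specific key):
 dispatcher.Dispatch(GameEvent.LEVEL_COMPLETE);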
*
* <li>commandBinder</li>
*
* Maps events that result in the creation and execution of Commands.
* Events from dispatcher can be used to trigger Commands.
*
* `commandBinder` is automatically injected into all Commands.
*
* Examples:
commandBinder.Bind(GameEvent.MISSILE_HIT).To<MissileHitCommand>(); //MissileHitCommand fires whenever MISSILE_HIT is dispatched
commandBinder.Bind(GameEvent.MISSILE_HIT).To<IncrementScoreCommand>().To<UpdateServerCommand>(); //Both Commands fire
commandBinder.Bind(ContextEvent.START).To<StartCommand>().Once(); //StartCommand fires when START fires, then unmaps itself
*
* <li>sequencer</li>
*
* Maps events that result in the creation and execution of Sequences,
* which are just like Commands, except they run sequentially, rather than
* in parallel.
*
* 'sequencer' is automatically injected into all SequenceCommands.
*
* In the following example, `TestMissileHitCommand` runs logic to determine
 * whether the missile hit is valid. If it's not, it may call `BreakSequence()`,
 * so neither of the other Commands will fire.
sequencer.Bind(GameEvent.MISSILE_HIT).To<TestMissileHitCommand>().To<IncrementScoreCommand>().To<UpdateServerCommand>();
*
* <li>mediationBinder</li>
*
 * Maps Views to Mediators in order to insulate Views from direct
* linkage to the application.
*
* MediationBinder isn't automatically injected anywhere. It is
* possible, however, that you might want to change mediation bindings
* at runtime. This might prove difficult as a practical matter, but
* if you want to experiment, feel free to inject `mediationBinder`
* into Commands or SequenceCommands like so:
[Inject]
IMediationBinder mediationBinder{get;set;}
*
* Example:
mediationBinder.Bind<RobotView>().To<RobotMediator>();
 * </ul>
*
*
*/
using strange.extensions.implicitBind.api;
using strange.extensions.implicitBind.impl;
using UnityEngine;
using strange.extensions.command.api;
using strange.extensions.command.impl;
using strange.extensions.context.api;
using strange.extensions.dispatcher.api;
using strange.extensions.dispatcher.eventdispatcher.api;
using strange.extensions.dispatcher.eventdispatcher.impl;
using strange.extensions.injector.api;
using strange.extensions.mediation.api;
using strange.extensions.mediation.impl;
using strange.extensions.sequencer.api;
using strange.extensions.sequencer.impl;
using strange.framework.api;
using strange.framework.impl;
namespace strange.extensions.context.impl
{
public class MVCSContext : CrossContext
{
/// A Binder that maps Events to Commands
public ICommandBinder commandBinder{get;set;}
/// A Binder that serves as the Event bus for the Context
public IEventDispatcher dispatcher{get;set;}
/// A Binder that maps Views to Mediators
public IMediationBinder mediationBinder{get;set;}
		/// A Binder that interprets implicit bindings
public IImplicitBinder implicitBinder { get; set; }
/// A Binder that maps Events to Sequences
public ISequencer sequencer{get;set;}
		/// A list of Views whose Awake fired before the Context was fully set up.
protected static ISemiBinding viewCache = new SemiBinding();
public MVCSContext() : base()
{}
/// The recommended Constructor
/// Just pass in the instance of your ContextView. Everything will begin automatically.
/// Other constructors offer the option of interrupting startup at useful moments.
public MVCSContext(MonoBehaviour view) : base(view)
{
}
public MVCSContext(MonoBehaviour view, ContextStartupFlags flags) : base(view, flags)
{
}
public MVCSContext(MonoBehaviour view, bool autoMapping) : base(view, autoMapping)
{
}
override public IContext SetContextView(object view)
{
			// Check the cast before dereferencing, so a non-MonoBehaviour view raises the
			// intended ContextException rather than a NullReferenceException.
			MonoBehaviour viewAsMonoBehaviour = view as MonoBehaviour;
			if (viewAsMonoBehaviour == null)
			{
				throw new ContextException("MVCSContext requires a ContextView of type MonoBehaviour", ContextExceptionType.NO_CONTEXT_VIEW);
			}
			contextView = viewAsMonoBehaviour.gameObject;
return this;
}
/// Map the relationships between the Binders.
/// Although you can override this method, it is recommended
/// that you provide all your application bindings in `mapBindings()`.
protected override void addCoreComponents()
{
base.addCoreComponents();
injectionBinder.Bind<IInstanceProvider>().Bind<IInjectionBinder>().ToValue(injectionBinder);
injectionBinder.Bind<IContext>().ToValue(this).ToName(ContextKeys.CONTEXT);
injectionBinder.Bind<ICommandBinder>().To<EventCommandBinder>().ToSingleton();
//This binding is for local dispatchers
injectionBinder.Bind<IEventDispatcher>().To<EventDispatcher>();
//This binding is for the common system bus
injectionBinder.Bind<IEventDispatcher>().To<EventDispatcher>().ToSingleton().ToName(ContextKeys.CONTEXT_DISPATCHER);
injectionBinder.Bind<IMediationBinder>().To<MediationBinder>().ToSingleton();
injectionBinder.Bind<ISequencer>().To<EventSequencer>().ToSingleton();
injectionBinder.Bind<IImplicitBinder>().To<ImplicitBinder>().ToSingleton();
}
protected override void instantiateCoreComponents()
{
base.instantiateCoreComponents();
if (contextView == null)
{
throw new ContextException("MVCSContext requires a ContextView of type MonoBehaviour", ContextExceptionType.NO_CONTEXT_VIEW);
}
injectionBinder.Bind<GameObject>().ToValue(contextView).ToName(ContextKeys.CONTEXT_VIEW);
commandBinder = injectionBinder.GetInstance<ICommandBinder>() as ICommandBinder;
dispatcher = injectionBinder.GetInstance<IEventDispatcher>(ContextKeys.CONTEXT_DISPATCHER) as IEventDispatcher;
mediationBinder = injectionBinder.GetInstance<IMediationBinder>() as IMediationBinder;
sequencer = injectionBinder.GetInstance<ISequencer>() as ISequencer;
implicitBinder = injectionBinder.GetInstance<IImplicitBinder>() as IImplicitBinder;
(dispatcher as ITriggerProvider).AddTriggerable(commandBinder as ITriggerable);
(dispatcher as ITriggerProvider).AddTriggerable(sequencer as ITriggerable);
}
protected override void postBindings()
{
//It's possible for views to fire their Awake before bindings. This catches any early risers and attaches their Mediators.
mediateViewCache();
//Ensure that all Views underneath the ContextView are triggered
mediationBinder.Trigger(MediationEvent.AWAKE, (contextView as GameObject).GetComponent<ContextView>());
}
/// Fires ContextEvent.START
/// Whatever Command/Sequence you want to happen first should
/// be mapped to this event.
public override void Launch()
{
dispatcher.Dispatch(ContextEvent.START);
}
/// Gets an instance of the provided generic type.
/// Always bear in mind that doing this risks adding
/// dependencies that must be cleaned up when Contexts
/// are removed.
override public object GetComponent<T>()
{
return GetComponent<T>(null);
}
/// Gets an instance of the provided generic type and name from the InjectionBinder
/// Always bear in mind that doing this risks adding
/// dependencies that must be cleaned up when Contexts
/// are removed.
override public object GetComponent<T>(object name)
{
IInjectionBinding binding = injectionBinder.GetBinding<T>(name);
if (binding != null)
{
return injectionBinder.GetInstance<T>(name);
}
return null;
}
override public void AddView(object view)
{
if (mediationBinder != null)
{
mediationBinder.Trigger(MediationEvent.AWAKE, view as IView);
}
else
{
cacheView(view as MonoBehaviour);
}
}
override public void RemoveView(object view)
{
mediationBinder.Trigger(MediationEvent.DESTROYED, view as IView);
}
/// Caches early-riser Views.
///
		/// If a View is on stage at startup, it's possible for that
		/// View to be Awake before this Context has finished initializing.
/// `cacheView()` maintains a list of such 'early-risers'
/// until the Context is ready to mediate them.
virtual protected void cacheView(MonoBehaviour view)
{
if (viewCache.constraint.Equals(BindingConstraintType.ONE))
{
viewCache.constraint = BindingConstraintType.MANY;
}
viewCache.Add(view);
}
/// Provide mediation for early-riser Views
virtual protected void mediateViewCache()
{
if (mediationBinder == null)
throw new ContextException("MVCSContext cannot mediate views without a mediationBinder", ContextExceptionType.NO_MEDIATION_BINDER);
object[] values = viewCache.value as object[];
if (values == null)
{
return;
}
int aa = values.Length;
for (int a = 0; a < aa; a++)
{
mediationBinder.Trigger(MediationEvent.AWAKE, values[a] as IView);
}
viewCache = new SemiBinding();
}
/// Clean up. Called by a ContextView in its OnDestroy method
public override void OnRemove()
{
base.OnRemove();
commandBinder.OnRemove();
}
}
}
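// ---------------------------------------------------------------------------
// Minimal usage sketch (not part of the framework): a concrete MVCSContext as
// described in the class comment above. MyProjectContext, StartCommand,
// RobotView, RobotMediator and MyProjectRoot are example application classes
// invented here; the binding calls mirror the ones shown in that comment.
namespace strange.examples
{
	using UnityEngine;
	using strange.extensions.command.impl;
	using strange.extensions.context.api;
	using strange.extensions.context.impl;
	using strange.extensions.mediation.impl;
	public class MyProjectContext : MVCSContext
	{
		public MyProjectContext(MonoBehaviour view) : base(view)
		{
		}
		// All application bindings go here, as recommended by addCoreComponents().
		protected override void mapBindings()
		{
			commandBinder.Bind(ContextEvent.START).To<StartCommand>().Once();
			mediationBinder.Bind<RobotView>().To<RobotMediator>();
		}
	}
	// Fired once when the Context dispatches ContextEvent.START from Launch().
	public class StartCommand : EventCommand
	{
		public override void Execute()
		{
			Debug.Log("Context started");
		}
	}
	public class RobotView : View
	{
	}
	public class RobotMediator : Mediator
	{
		public override void OnRegister()
		{
			// Wire View events to dispatcher events here.
		}
	}
	// The ContextView at the root of the display hierarchy boots the Context.
	public class MyProjectRoot : ContextView
	{
		void Awake()
		{
			context = new MyProjectContext(this);
		}
	}
}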
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics;
using System.Reflection;
using Internal.NativeFormat;
namespace Internal.TypeSystem
{
public abstract partial class TypeSystemContext
{
public TypeSystemContext() : this(new TargetDetails(TargetArchitecture.Unknown, TargetOS.Unknown))
{
}
public TypeSystemContext(TargetDetails target)
{
Target = target;
_instantiatedTypes = new InstantiatedTypeKey.InstantiatedTypeKeyHashtable();
_arrayTypes = new ArrayTypeKey.ArrayTypeKeyHashtable();
_byRefTypes = new ByRefHashtable();
_pointerTypes = new PointerHashtable();
_instantiatedMethods = new InstantiatedMethodKey.InstantiatedMethodKeyHashtable();
_methodForInstantiatedTypes = new MethodForInstantiatedTypeKey.MethodForInstantiatedTypeKeyHashtable();
_fieldForInstantiatedTypes = new FieldForInstantiatedTypeKey.FieldForInstantiatedTypeKeyHashtable();
_signatureVariables = new SignatureVariableHashtable(this);
}
public TargetDetails Target
{
get; private set;
}
public ModuleDesc SystemModule
{
get;
private set;
}
protected void InitializeSystemModule(ModuleDesc systemModule)
{
Debug.Assert(SystemModule == null);
SystemModule = systemModule;
}
public abstract DefType GetWellKnownType(WellKnownType wellKnownType);
public virtual ModuleDesc ResolveAssembly(AssemblyName name, bool throwIfNotFound = true)
{
if (throwIfNotFound)
throw new NotSupportedException();
return null;
}
//
// Array types
//
public ArrayType GetArrayType(TypeDesc elementType)
{
return GetArrayType(elementType, -1);
}
//
// MDArray types
//
private struct ArrayTypeKey
{
private TypeDesc _elementType;
private int _rank;
public ArrayTypeKey(TypeDesc elementType, int rank)
{
_elementType = elementType;
_rank = rank;
}
public TypeDesc ElementType
{
get
{
return _elementType;
}
}
public int Rank
{
get
{
return _rank;
}
}
public class ArrayTypeKeyHashtable : LockFreeReaderHashtable<ArrayTypeKey, ArrayType>
{
protected override int GetKeyHashCode(ArrayTypeKey key)
{
return TypeHashingAlgorithms.ComputeArrayTypeHashCode(key._elementType, key._rank);
}
protected override int GetValueHashCode(ArrayType value)
{
return TypeHashingAlgorithms.ComputeArrayTypeHashCode(value.ElementType, value.IsSzArray ? -1 : value.Rank);
}
protected override bool CompareKeyToValue(ArrayTypeKey key, ArrayType value)
{
if (key._elementType != value.ElementType)
return false;
if ((key._rank == -1) && value.IsSzArray)
return true;
return key._rank == value.Rank;
}
protected override bool CompareValueToValue(ArrayType value1, ArrayType value2)
{
return (value1.ElementType == value2.ElementType) && (value1.Rank == value2.Rank) && value1.IsSzArray == value2.IsSzArray;
}
protected override ArrayType CreateValueFromKey(ArrayTypeKey key)
{
return new ArrayType(key.ElementType, key.Rank);
}
}
}
private ArrayTypeKey.ArrayTypeKeyHashtable _arrayTypes;
public ArrayType GetArrayType(TypeDesc elementType, int rank)
{
return _arrayTypes.GetOrCreateValue(new ArrayTypeKey(elementType, rank));
}
//
// ByRef types
//
public class ByRefHashtable : LockFreeReaderHashtable<TypeDesc, ByRefType>
{
protected override int GetKeyHashCode(TypeDesc key)
{
return key.GetHashCode();
}
protected override int GetValueHashCode(ByRefType value)
{
return value.ParameterType.GetHashCode();
}
protected override bool CompareKeyToValue(TypeDesc key, ByRefType value)
{
return key == value.ParameterType;
}
protected override bool CompareValueToValue(ByRefType value1, ByRefType value2)
{
return value1.ParameterType == value2.ParameterType;
}
protected override ByRefType CreateValueFromKey(TypeDesc key)
{
return new ByRefType(key);
}
}
private ByRefHashtable _byRefTypes;
public ByRefType GetByRefType(TypeDesc parameterType)
{
return _byRefTypes.GetOrCreateValue(parameterType);
}
//
// Pointer types
//
public class PointerHashtable : LockFreeReaderHashtable<TypeDesc, PointerType>
{
protected override int GetKeyHashCode(TypeDesc key)
{
return key.GetHashCode();
}
protected override int GetValueHashCode(PointerType value)
{
return value.ParameterType.GetHashCode();
}
protected override bool CompareKeyToValue(TypeDesc key, PointerType value)
{
return key == value.ParameterType;
}
protected override bool CompareValueToValue(PointerType value1, PointerType value2)
{
return value1.ParameterType == value2.ParameterType;
}
protected override PointerType CreateValueFromKey(TypeDesc key)
{
return new PointerType(key);
}
}
private PointerHashtable _pointerTypes;
public PointerType GetPointerType(TypeDesc parameterType)
{
return _pointerTypes.GetOrCreateValue(parameterType);
}
//
// Instantiated types
//
private struct InstantiatedTypeKey
{
private TypeDesc _typeDef;
private Instantiation _instantiation;
public InstantiatedTypeKey(TypeDesc typeDef, Instantiation instantiation)
{
_typeDef = typeDef;
_instantiation = instantiation;
}
public TypeDesc TypeDef
{
get
{
return _typeDef;
}
}
public Instantiation Instantiation
{
get
{
return _instantiation;
}
}
public class InstantiatedTypeKeyHashtable : LockFreeReaderHashtable<InstantiatedTypeKey, InstantiatedType>
{
protected override int GetKeyHashCode(InstantiatedTypeKey key)
{
return key._instantiation.ComputeGenericInstanceHashCode(key._typeDef.GetHashCode());
}
protected override int GetValueHashCode(InstantiatedType value)
{
return value.Instantiation.ComputeGenericInstanceHashCode(value.GetTypeDefinition().GetHashCode());
}
protected override bool CompareKeyToValue(InstantiatedTypeKey key, InstantiatedType value)
{
if (key._typeDef != value.GetTypeDefinition())
return false;
Instantiation valueInstantiation = value.Instantiation;
if (key._instantiation.Length != valueInstantiation.Length)
return false;
for (int i = 0; i < key._instantiation.Length; i++)
{
if (key._instantiation[i] != valueInstantiation[i])
return false;
}
return true;
}
protected override bool CompareValueToValue(InstantiatedType value1, InstantiatedType value2)
{
if (value1.GetTypeDefinition() != value2.GetTypeDefinition())
return false;
Instantiation value1Instantiation = value1.Instantiation;
Instantiation value2Instantiation = value2.Instantiation;
if (value1Instantiation.Length != value2Instantiation.Length)
return false;
for (int i = 0; i < value1Instantiation.Length; i++)
{
if (value1Instantiation[i] != value2Instantiation[i])
return false;
}
return true;
}
protected override InstantiatedType CreateValueFromKey(InstantiatedTypeKey key)
{
return new InstantiatedType((MetadataType)key.TypeDef, key.Instantiation);
}
}
}
private InstantiatedTypeKey.InstantiatedTypeKeyHashtable _instantiatedTypes;
public InstantiatedType GetInstantiatedType(MetadataType typeDef, Instantiation instantiation)
{
return _instantiatedTypes.GetOrCreateValue(new InstantiatedTypeKey(typeDef, instantiation));
}
//
// Instantiated methods
//
private struct InstantiatedMethodKey
{
private MethodDesc _methodDef;
private Instantiation _instantiation;
private int _hashcode;
public InstantiatedMethodKey(MethodDesc methodDef, Instantiation instantiation)
{
_methodDef = methodDef;
_instantiation = instantiation;
_hashcode = TypeHashingAlgorithms.ComputeMethodHashCode(methodDef.OwningType.GetHashCode(),
instantiation.ComputeGenericInstanceHashCode(TypeHashingAlgorithms.ComputeNameHashCode(methodDef.Name)));
}
public MethodDesc MethodDef
{
get
{
return _methodDef;
}
}
public Instantiation Instantiation
{
get
{
return _instantiation;
}
}
public class InstantiatedMethodKeyHashtable : LockFreeReaderHashtable<InstantiatedMethodKey, InstantiatedMethod>
{
protected override int GetKeyHashCode(InstantiatedMethodKey key)
{
return key._hashcode;
}
protected override int GetValueHashCode(InstantiatedMethod value)
{
return value.GetHashCode();
}
protected override bool CompareKeyToValue(InstantiatedMethodKey key, InstantiatedMethod value)
{
if (key._methodDef != value.GetMethodDefinition())
return false;
Instantiation valueInstantiation = value.Instantiation;
if (key._instantiation.Length != valueInstantiation.Length)
return false;
for (int i = 0; i < key._instantiation.Length; i++)
{
if (key._instantiation[i] != valueInstantiation[i])
return false;
}
return true;
}
protected override bool CompareValueToValue(InstantiatedMethod value1, InstantiatedMethod value2)
{
if (value1.GetMethodDefinition() != value2.GetMethodDefinition())
return false;
Instantiation value1Instantiation = value1.Instantiation;
Instantiation value2Instantiation = value2.Instantiation;
if (value1Instantiation.Length != value2Instantiation.Length)
return false;
for (int i = 0; i < value1Instantiation.Length; i++)
{
if (value1Instantiation[i] != value2Instantiation[i])
return false;
}
return true;
}
protected override InstantiatedMethod CreateValueFromKey(InstantiatedMethodKey key)
{
return new InstantiatedMethod(key.MethodDef, key.Instantiation, key._hashcode);
}
}
}
private InstantiatedMethodKey.InstantiatedMethodKeyHashtable _instantiatedMethods;
public InstantiatedMethod GetInstantiatedMethod(MethodDesc methodDef, Instantiation instantiation)
{
Debug.Assert(!(methodDef is InstantiatedMethod));
return _instantiatedMethods.GetOrCreateValue(new InstantiatedMethodKey(methodDef, instantiation));
}
//
// Methods for instantiated type
//
private struct MethodForInstantiatedTypeKey
{
private MethodDesc _typicalMethodDef;
private InstantiatedType _instantiatedType;
private int _hashcode;
public MethodForInstantiatedTypeKey(MethodDesc typicalMethodDef, InstantiatedType instantiatedType)
{
_typicalMethodDef = typicalMethodDef;
_instantiatedType = instantiatedType;
_hashcode = TypeHashingAlgorithms.ComputeMethodHashCode(instantiatedType.GetHashCode(), TypeHashingAlgorithms.ComputeNameHashCode(typicalMethodDef.Name));
}
public MethodDesc TypicalMethodDef
{
get
{
return _typicalMethodDef;
}
}
public InstantiatedType InstantiatedType
{
get
{
return _instantiatedType;
}
}
public class MethodForInstantiatedTypeKeyHashtable : LockFreeReaderHashtable<MethodForInstantiatedTypeKey, MethodForInstantiatedType>
{
protected override int GetKeyHashCode(MethodForInstantiatedTypeKey key)
{
return key._hashcode;
}
protected override int GetValueHashCode(MethodForInstantiatedType value)
{
return value.GetHashCode();
}
protected override bool CompareKeyToValue(MethodForInstantiatedTypeKey key, MethodForInstantiatedType value)
{
if (key._typicalMethodDef != value.GetTypicalMethodDefinition())
return false;
return key._instantiatedType == value.OwningType;
}
protected override bool CompareValueToValue(MethodForInstantiatedType value1, MethodForInstantiatedType value2)
{
return (value1.GetTypicalMethodDefinition() == value2.GetTypicalMethodDefinition()) && (value1.OwningType == value2.OwningType);
}
protected override MethodForInstantiatedType CreateValueFromKey(MethodForInstantiatedTypeKey key)
{
return new MethodForInstantiatedType(key.TypicalMethodDef, key.InstantiatedType, key._hashcode);
}
}
}
private MethodForInstantiatedTypeKey.MethodForInstantiatedTypeKeyHashtable _methodForInstantiatedTypes;
public MethodDesc GetMethodForInstantiatedType(MethodDesc typicalMethodDef, InstantiatedType instantiatedType)
{
Debug.Assert(!(typicalMethodDef is MethodForInstantiatedType));
Debug.Assert(!(typicalMethodDef is InstantiatedMethod));
return _methodForInstantiatedTypes.GetOrCreateValue(new MethodForInstantiatedTypeKey(typicalMethodDef, instantiatedType));
}
//
// Fields for instantiated type
//
private struct FieldForInstantiatedTypeKey
{
private FieldDesc _fieldDef;
private InstantiatedType _instantiatedType;
public FieldForInstantiatedTypeKey(FieldDesc fieldDef, InstantiatedType instantiatedType)
{
_fieldDef = fieldDef;
_instantiatedType = instantiatedType;
}
public FieldDesc TypicalFieldDef
{
get
{
return _fieldDef;
}
}
public InstantiatedType InstantiatedType
{
get
{
return _instantiatedType;
}
}
public class FieldForInstantiatedTypeKeyHashtable : LockFreeReaderHashtable<FieldForInstantiatedTypeKey, FieldForInstantiatedType>
{
protected override int GetKeyHashCode(FieldForInstantiatedTypeKey key)
{
return key._fieldDef.GetHashCode() ^ key._instantiatedType.GetHashCode();
}
protected override int GetValueHashCode(FieldForInstantiatedType value)
{
return value.GetTypicalFieldDefinition().GetHashCode() ^ value.OwningType.GetHashCode();
}
protected override bool CompareKeyToValue(FieldForInstantiatedTypeKey key, FieldForInstantiatedType value)
{
if (key._fieldDef != value.GetTypicalFieldDefinition())
return false;
return key._instantiatedType == value.OwningType;
}
protected override bool CompareValueToValue(FieldForInstantiatedType value1, FieldForInstantiatedType value2)
{
return (value1.GetTypicalFieldDefinition() == value2.GetTypicalFieldDefinition()) && (value1.OwningType == value2.OwningType);
}
protected override FieldForInstantiatedType CreateValueFromKey(FieldForInstantiatedTypeKey key)
{
return new FieldForInstantiatedType(key.TypicalFieldDef, key.InstantiatedType);
}
}
}
private FieldForInstantiatedTypeKey.FieldForInstantiatedTypeKeyHashtable _fieldForInstantiatedTypes;
public FieldDesc GetFieldForInstantiatedType(FieldDesc fieldDef, InstantiatedType instantiatedType)
{
return _fieldForInstantiatedTypes.GetOrCreateValue(new FieldForInstantiatedTypeKey(fieldDef, instantiatedType));
}
//
// Signature variables
//
private class SignatureVariableHashtable : LockFreeReaderHashtable<uint, SignatureVariable>
{
private TypeSystemContext _context;
public SignatureVariableHashtable(TypeSystemContext context)
{
_context = context;
}
protected override int GetKeyHashCode(uint key)
{
return (int)key;
}
protected override int GetValueHashCode(SignatureVariable value)
{
uint combinedIndex = value.IsMethodSignatureVariable ? ((uint)value.Index | 0x80000000) : (uint)value.Index;
return (int)combinedIndex;
}
protected override bool CompareKeyToValue(uint key, SignatureVariable value)
{
uint combinedIndex = value.IsMethodSignatureVariable ? ((uint)value.Index | 0x80000000) : (uint)value.Index;
return key == combinedIndex;
}
protected override bool CompareValueToValue(SignatureVariable value1, SignatureVariable value2)
{
uint combinedIndex1 = value1.IsMethodSignatureVariable ? ((uint)value1.Index | 0x80000000) : (uint)value1.Index;
uint combinedIndex2 = value2.IsMethodSignatureVariable ? ((uint)value2.Index | 0x80000000) : (uint)value2.Index;
return combinedIndex1 == combinedIndex2;
}
protected override SignatureVariable CreateValueFromKey(uint key)
{
bool method = ((key & 0x80000000) != 0);
int index = (int)(key & 0x7FFFFFFF);
if (method)
return new SignatureMethodVariable(_context, index);
else
return new SignatureTypeVariable(_context, index);
}
}
private SignatureVariableHashtable _signatureVariables;
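        /// <summary>
        /// Gets the interned signature variable for the given index. Method and type
        /// generic parameters share a single table: the high bit of the packed key
        /// marks a method signature variable (IL !!n) as opposed to a type signature
        /// variable (IL !n).
        /// </summary>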
public TypeDesc GetSignatureVariable(int index, bool method)
{
if (index < 0)
throw new BadImageFormatException();
uint combinedIndex = method ? ((uint)index | 0x80000000) : (uint)index;
return _signatureVariables.GetOrCreateValue(combinedIndex);
}
/// <summary>
/// Abstraction to allow the type system context to affect the field layout
/// algorithm used by types to lay themselves out.
/// </summary>
public virtual FieldLayoutAlgorithm GetLayoutAlgorithmForType(DefType type)
{
// Type system contexts that support computing field layout need to override this.
throw new NotSupportedException();
}
/// <summary>
/// Abstraction to allow the type system context to control the interfaces
/// algorithm used by types.
/// </summary>
public RuntimeInterfacesAlgorithm GetRuntimeInterfacesAlgorithmForType(TypeDesc type)
{
if (type.IsDefType)
{
return GetRuntimeInterfacesAlgorithmForDefType((DefType)type);
}
else if (type.IsArray)
{
ArrayType arrType = (ArrayType)type;
if (arrType.IsSzArray && !arrType.ElementType.IsPointer)
{
return GetRuntimeInterfacesAlgorithmForNonPointerArrayType((ArrayType)type);
}
else
{
return BaseTypeRuntimeInterfacesAlgorithm.Instance;
}
}
return null;
}
/// <summary>
/// Abstraction to allow the type system context to control the interfaces
/// algorithm used by types.
/// </summary>
protected virtual RuntimeInterfacesAlgorithm GetRuntimeInterfacesAlgorithmForDefType(DefType type)
{
// Type system contexts that support computing runtime interfaces need to override this.
throw new NotSupportedException();
}
/// <summary>
/// Abstraction to allow the type system context to control the interfaces
/// algorithm used by single dimensional array types.
/// </summary>
protected virtual RuntimeInterfacesAlgorithm GetRuntimeInterfacesAlgorithmForNonPointerArrayType(ArrayType type)
{
// Type system contexts that support computing runtime interfaces need to override this.
throw new NotSupportedException();
}
public virtual VirtualMethodAlgorithm GetVirtualMethodAlgorithmForType(TypeDesc type)
{
// Type system contexts that support virtual method resolution need to override this.
throw new NotSupportedException();
}
public virtual VirtualMethodEnumerationAlgorithm GetVirtualMethodEnumerationAlgorithmForType(TypeDesc type)
{
// Type system contexts that support this need to override this.
throw new NotSupportedException();
}
}
}
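// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the type system): every hashtable above
// implements the same interning pattern -- each constructed type (array, byref,
// pointer, instantiation, signature variable) is created at most once per
// context, so reference equality can stand in for type identity. A simplified,
// lock-based version of that pattern is shown here; the real implementation
// uses LockFreeReaderHashtable to keep the read path lock-free.
namespace Internal.TypeSystem
{
    using System;
    using System.Collections.Generic;
    internal sealed class InterningCacheExample<TKey, TValue>
    {
        private readonly Dictionary<TKey, TValue> _values = new Dictionary<TKey, TValue>();
        private readonly Func<TKey, TValue> _factory;
        private readonly object _syncObject = new object();
        public InterningCacheExample(Func<TKey, TValue> factory)
        {
            _factory = factory;
        }
        /// <summary>
        /// Returns the canonical value for the key, creating it on first request.
        /// Subsequent calls with an equal key return the same instance.
        /// </summary>
        public TValue GetOrCreateValue(TKey key)
        {
            lock (_syncObject)
            {
                TValue value;
                if (!_values.TryGetValue(key, out value))
                {
                    value = _factory(key);
                    _values.Add(key, value);
                }
                return value;
            }
        }
    }
}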
| |
#if !BESTHTTP_DISABLE_CACHING && (!UNITY_WEBGL || UNITY_EDITOR)
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
#if NETFX_CORE
using FileStream = BestHTTP.PlatformSupport.IO.FileStream;
using Directory = BestHTTP.PlatformSupport.IO.Directory;
using File = BestHTTP.PlatformSupport.IO.File;
using BestHTTP.PlatformSupport.IO;
//Disable CS4014: Because this call is not awaited, execution of the current method continues before the call is completed. Consider applying the 'await' operator to the result of the call.
#pragma warning disable 4014
#else
using FileStream = System.IO.FileStream;
using Directory = System.IO.Directory;
using System.IO;
#endif
//
// Version 1: Initial release
// Version 2: Filenames are generated from an index.
//
namespace BestHTTP.Caching
{
using BestHTTP.Extensions;
public static class HTTPCacheService
{
#region Properties & Fields
/// <summary>
/// Library file-format versioning support
/// </summary>
private const int LibraryVersion = 2;
public static bool IsSupported
{
get
{
if (IsSupportCheckDone)
return isSupported;
try
{
File.Exists(HTTPManager.GetRootCacheFolder());
isSupported = true;
}
catch
{
isSupported = false;
HTTPManager.Logger.Warning("HTTPCacheService", "Cache Service Disabled!");
}
finally
{
IsSupportCheckDone = true;
}
return isSupported;
}
}
private static bool isSupported;
private static bool IsSupportCheckDone;
private static Dictionary<Uri, HTTPCacheFileInfo> library;
private static Dictionary<Uri, HTTPCacheFileInfo> Library { get { LoadLibrary(); return library; } }
private static Dictionary<UInt64, HTTPCacheFileInfo> UsedIndexes = new Dictionary<ulong, HTTPCacheFileInfo>();
internal static string CacheFolder { get; private set; }
private static string LibraryPath { get; set; }
private static bool InClearThread;
private static bool InMaintainenceThread;
/// <summary>
/// Stores the index of the next stored entity. The entity's file name is generated from this index.
/// </summary>
private static UInt64 NextNameIDX;
#endregion
static HTTPCacheService()
{
NextNameIDX = 0x0001;
}
#region Common Functions
internal static void CheckSetup()
{
if (!HTTPCacheService.IsSupported)
return;
try
{
SetupCacheFolder();
LoadLibrary();
}
catch
{ }
}
internal static void SetupCacheFolder()
{
if (!HTTPCacheService.IsSupported)
return;
try
{
if (string.IsNullOrEmpty(CacheFolder) || string.IsNullOrEmpty(LibraryPath))
{
CacheFolder = System.IO.Path.Combine(HTTPManager.GetRootCacheFolder(), "HTTPCache");
if (!Directory.Exists(CacheFolder))
Directory.CreateDirectory(CacheFolder);
LibraryPath = System.IO.Path.Combine(HTTPManager.GetRootCacheFolder(), "Library");
}
}
catch
{ }
}
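		/// <summary>
		/// Returns the current file-name index and then advances NextNameIDX to the
		/// next free value: the counter wraps just below UInt64.MaxValue and any
		/// index still present in UsedIndexes is skipped, so a name is never handed
		/// out twice while its cache entry is alive.
		/// </summary>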
internal static UInt64 GetNameIdx()
{
lock(Library)
{
UInt64 result = NextNameIDX;
do
{
NextNameIDX = ++NextNameIDX % UInt64.MaxValue;
} while (UsedIndexes.ContainsKey(NextNameIDX));
return result;
}
}
internal static bool HasEntity(Uri uri)
{
if (!IsSupported)
return false;
lock (Library)
return Library.ContainsKey(uri);
}
internal static bool DeleteEntity(Uri uri, bool removeFromLibrary = true)
{
if (!IsSupported)
return false;
object uriLocker = HTTPCacheFileLock.Acquire(uri);
// Just use lock now: http://forum.unity3d.com/threads/4-6-ios-64-bit-beta.290551/page-6#post-1937033
			// The earlier approach (left commented out below) tried to acquire the lock on this uri
			// only for a short time and simply returned if it couldn't, to avoid risking a deadlock.
//if (Monitor.TryEnter(uriLocker, TimeSpan.FromSeconds(0.5f)))
lock(uriLocker)
{
try
{
lock (Library)
{
HTTPCacheFileInfo info;
bool inStats = Library.TryGetValue(uri, out info);
if (inStats)
info.Delete();
if (inStats && removeFromLibrary)
{
Library.Remove(uri);
UsedIndexes.Remove(info.MappedNameIDX);
}
return true;
}
}
finally
{
//Monitor.Exit(uriLocker);
}
}
//return false;
}
internal static bool IsCachedEntityExpiresInTheFuture(HTTPRequest request)
{
if (!IsSupported)
return false;
HTTPCacheFileInfo info;
lock (Library)
if (Library.TryGetValue(request.CurrentUri, out info))
return info.WillExpireInTheFuture();
return false;
}
/// <summary>
/// Utility function to set the cache control headers according to the spec.: http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3.4
/// </summary>
/// <param name="request"></param>
internal static void SetHeaders(HTTPRequest request)
{
if (!IsSupported)
return;
HTTPCacheFileInfo info;
lock (Library)
if (Library.TryGetValue(request.CurrentUri, out info))
info.SetUpRevalidationHeaders(request);
}
#endregion
#region Get Functions
internal static System.IO.Stream GetBody(Uri uri, out int length)
{
length = 0;
if (!IsSupported)
return null;
HTTPCacheFileInfo info;
lock (Library)
if (Library.TryGetValue(uri, out info))
return info.GetBodyStream(out length);
return null;
}
internal static HTTPResponse GetFullResponse(HTTPRequest request)
{
if (!IsSupported)
return null;
HTTPCacheFileInfo info;
lock (Library)
if (Library.TryGetValue(request.CurrentUri, out info))
return info.ReadResponseTo(request);
return null;
}
#endregion
#region Storing
/// <summary>
/// Checks if the given response can be cached. http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.4
/// </summary>
/// <returns>Returns true if cacheable, false otherwise.</returns>
internal static bool IsCacheble(Uri uri, HTTPMethods method, HTTPResponse response)
{
if (!IsSupported)
return false;
if (method != HTTPMethods.Get)
return false;
if (response == null)
return false;
// Already cached
if (response.StatusCode == 304)
return false;
if (response.StatusCode < 200 || response.StatusCode >= 400)
return false;
//http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9.2
var cacheControls = response.GetHeaderValues("cache-control");
if (cacheControls != null)
{
if (cacheControls.Exists(headerValue => {
string value = headerValue.ToLower();
return value.Contains("no-store") || value.Contains("no-cache");
}))
return false;
}
var pragmas = response.GetHeaderValues("pragma");
if (pragmas != null)
{
if (pragmas.Exists(headerValue => {
string value = headerValue.ToLower();
return value.Contains("no-store") || value.Contains("no-cache");
}))
return false;
}
			// Responses with byte ranges are not supported yet.
var byteRanges = response.GetHeaderValues("content-range");
if (byteRanges != null)
return false;
return true;
}
internal static HTTPCacheFileInfo Store(Uri uri, HTTPMethods method, HTTPResponse response)
{
if (response == null || response.Data == null || response.Data.Length == 0)
return null;
if (!IsSupported)
return null;
HTTPCacheFileInfo info = null;
lock (Library)
{
if (!Library.TryGetValue(uri, out info))
{
Library.Add(uri, info = new HTTPCacheFileInfo(uri));
UsedIndexes.Add(info.MappedNameIDX, info);
}
try
{
info.Store(response);
}
catch
{
				// If something happens while we write out the response, then we will delete it because it might be in an invalid state.
DeleteEntity(uri);
throw;
}
}
return info;
}
internal static System.IO.Stream PrepareStreamed(Uri uri, HTTPResponse response)
{
if (!IsSupported)
return null;
HTTPCacheFileInfo info;
lock (Library)
{
if (!Library.TryGetValue(uri, out info))
{
Library.Add(uri, info = new HTTPCacheFileInfo(uri));
UsedIndexes.Add(info.MappedNameIDX, info);
}
try
{
return info.GetSaveStream(response);
}
catch
{
				// If something happens while we write out the response, then we will delete it because it might be in an invalid state.
DeleteEntity(uri);
throw;
}
}
}
#endregion
		#region Public Maintenance Functions
/// <summary>
		/// Deletes all cache entities. Non-blocking.
		/// <remarks>Call it only if there are no requests currently being processed, because cache entries can be deleted while a server sends back a 304 result, in which case there would be no data to read from the cache!</remarks>
/// </summary>
public static void BeginClear()
{
if (!IsSupported)
return;
if (InClearThread)
return;
InClearThread = true;
SetupCacheFolder();
#if !NETFX_CORE
//ThreadPool.QueueUserWorkItem(new WaitCallback((param) =>
new Thread(ClearImpl).Start();
#else
Windows.System.Threading.ThreadPool.RunAsync(ClearImpl);
#endif
}
private static void ClearImpl(object param)
{
if (!IsSupported)
return;
try
{
// GetFiles will return a string array that contains the files in the folder with the full path
string[] cacheEntries = Directory.GetFiles(CacheFolder);
for (int i = 0; i < cacheEntries.Length; ++i)
{
// We need a try-catch block because between the Directory.GetFiles call and the File.Delete calls a maintenance job or another file operation can delete any file from the cache folder.
// So even if there is a problem with one file, we don't want to abort the whole for loop.
try
{
File.Delete(cacheEntries[i]);
}
catch
{ }
}
}
finally
{
UsedIndexes.Clear();
library.Clear();
NextNameIDX = 0x0001;
SaveLibrary();
InClearThread = false;
}
}
/// <summary>
/// Deletes all expired cache entities.
/// <remarks>Call it only if there are no requests currently being processed, because cache entries can be deleted while a server sends back a 304 result, in which case there would be no data to read from the cache!</remarks>
/// </summary>
public static void BeginMaintainence(HTTPCacheMaintananceParams maintananceParam)
{
if (maintananceParam == null)
throw new ArgumentNullException("maintananceParam");
if (!HTTPCacheService.IsSupported)
return;
if (InMaintainenceThread)
return;
InMaintainenceThread = true;
SetupCacheFolder();
#if !NETFX_CORE
//ThreadPool.QueueUserWorkItem(new WaitCallback((param) =>
new Thread((param) =>
#else
Windows.System.Threading.ThreadPool.RunAsync((param) =>
#endif
{
try
{
lock (Library)
{
// Delete cache entries older than the given time.
DateTime deleteOlderAccessed = DateTime.UtcNow - maintananceParam.DeleteOlder;
List<HTTPCacheFileInfo> removedEntities = new List<HTTPCacheFileInfo>();
foreach (var kvp in Library)
if (kvp.Value.LastAccess < deleteOlderAccessed)
{
if (DeleteEntity(kvp.Key, false))
removedEntities.Add(kvp.Value);
}
for (int i = 0; i < removedEntities.Count; ++i)
{
Library.Remove(removedEntities[i].Uri);
UsedIndexes.Remove(removedEntities[i].MappedNameIDX);
}
removedEntities.Clear();
ulong cacheSize = GetCacheSize();
// This step deletes entries starting with the oldest LastAccess while the cache size is greater than the MaxCacheSize given in the param.
if (cacheSize > maintananceParam.MaxCacheSize)
{
List<HTTPCacheFileInfo> fileInfos = new List<HTTPCacheFileInfo>(library.Count);
foreach(var kvp in library)
fileInfos.Add(kvp.Value);
fileInfos.Sort();
int idx = 0;
while (cacheSize >= maintananceParam.MaxCacheSize && idx < fileInfos.Count)
{
try
{
var fi = fileInfos[idx];
ulong length = (ulong)fi.BodyLength;
DeleteEntity(fi.Uri);
cacheSize -= length;
}
catch
{}
finally
{
++idx;
}
}
}
}
}
finally
{
SaveLibrary();
InMaintainenceThread = false;
}
}
#if !NETFX_CORE
).Start();
#else
);
#endif
}
public static int GetCacheEntityCount()
{
if (!HTTPCacheService.IsSupported)
return 0;
CheckSetup();
lock(Library)
return Library.Count;
}
public static ulong GetCacheSize()
{
ulong size = 0;
if (!IsSupported)
return size;
CheckSetup();
lock (Library)
foreach (var kvp in Library)
if (kvp.Value.BodyLength > 0)
size += (ulong)kvp.Value.BodyLength;
return size;
}
#endregion
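// Illustrative sketch (not part of the original source): a typical maintenance call made
// from application code, assuming the HTTPCacheMaintananceParams constructor takes a
// "delete entries older than" TimeSpan and a maximum cache size in bytes:
//
//   HTTPCacheService.BeginMaintainence(
//       new HTTPCacheMaintananceParams(TimeSpan.FromDays(7), 50 * 1024 * 1024));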
#region Cache Library Management
private static void LoadLibrary()
{
// Already loaded?
if (library != null)
return;
if (!IsSupported)
return;
library = new Dictionary<Uri, HTTPCacheFileInfo>();
if (!File.Exists(LibraryPath))
{
DeleteUnusedFiles();
return;
}
try
{
int version;
lock (library)
{
using (var fs = new FileStream(LibraryPath, FileMode.Open))
using (var br = new System.IO.BinaryReader(fs))
{
version = br.ReadInt32();
if (version > 1)
NextNameIDX = br.ReadUInt64();
int statCount = br.ReadInt32();
for (int i = 0; i < statCount; ++i)
{
Uri uri = new Uri(br.ReadString());
var entity = new HTTPCacheFileInfo(uri, br, version);
if (entity.IsExists())
{
library.Add(uri, entity);
if (version > 1)
UsedIndexes.Add(entity.MappedNameIDX, entity);
}
}
}
}
if (version == 1)
BeginClear();
else
DeleteUnusedFiles();
}
catch
{}
}
internal static void SaveLibrary()
{
if (library == null)
return;
if (!IsSupported)
return;
try
{
lock (Library)
{
using (var fs = new FileStream(LibraryPath, FileMode.Create))
using (var bw = new System.IO.BinaryWriter(fs))
{
bw.Write(LibraryVersion);
bw.Write(NextNameIDX);
bw.Write(Library.Count);
foreach (var kvp in Library)
{
bw.Write(kvp.Key.ToString());
kvp.Value.SaveTo(bw);
}
}
}
}
catch
{}
}
internal static void SetBodyLength(Uri uri, int bodyLength)
{
if (!IsSupported)
return;
lock (Library)
{
HTTPCacheFileInfo fileInfo;
if (Library.TryGetValue(uri, out fileInfo))
fileInfo.BodyLength = bodyLength;
else
{
Library.Add(uri, fileInfo = new HTTPCacheFileInfo(uri, DateTime.UtcNow, bodyLength));
UsedIndexes.Add(fileInfo.MappedNameIDX, fileInfo);
}
}
}
/// <summary>
/// Deletes all files from the cache folder that aren't in the Library.
/// </summary>
private static void DeleteUnusedFiles()
{
if (!IsSupported)
return;
CheckSetup();
// GetFiles will return a string array that contains the files in the folder with the full path
string[] cacheEntries = Directory.GetFiles(CacheFolder);
for (int i = 0; i < cacheEntries.Length; ++i)
{
// We need a try-catch block because between the Directory.GetFiles call and the File.Delete calls a maintenance job or another file operation can delete any file from the cache folder.
// So even if there is a problem with one file, we don't want to abort the whole for loop.
try
{
string filename = System.IO.Path.GetFileName(cacheEntries[i]);
UInt64 idx = 0;
bool deleteFile = false;
if (UInt64.TryParse(filename, System.Globalization.NumberStyles.AllowHexSpecifier, null, out idx))
lock (Library)
deleteFile = !UsedIndexes.ContainsKey(idx);
else
deleteFile = true;
if (deleteFile)
File.Delete(cacheEntries[i]);
}
catch
{}
}
}
#endregion
}
}
#endif
| |
using System;
using System.Data;
using System.Runtime.CompilerServices;
namespace Net.Code.ADONet.Extensions.SqlClient
{
class DataReaderDecorator : IDataReader
{
readonly IDataReader _decorated;
public DataReaderDecorator(IDataReader decorated)
{
_decorated = decorated;
}
public void Dispose()
{
Log();
_decorated.Dispose();
}
public string GetName(int i)
{
Log(i);
return _decorated.GetName(i);
}
public string GetDataTypeName(int i)
{
Log(i);
return _decorated.GetDataTypeName(i);
}
public Type GetFieldType(int i)
{
Log(i);
return _decorated.GetFieldType(i);
}
public object GetValue(int i)
{
Log(i);
return _decorated.GetValue(i);
}
public int GetValues(object[] values)
{
Log();
return _decorated.GetValues(values);
}
public int GetOrdinal(string name)
{
Log((object)name);
return _decorated.GetOrdinal(name);
}
public bool GetBoolean(int i)
{
Log(i);
return _decorated.GetBoolean(i);
}
public byte GetByte(int i)
{
Log(i);
return _decorated.GetByte(i);
}
public long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length)
{
Log();
return _decorated.GetBytes(i, fieldOffset, buffer, bufferoffset, length);
}
public char GetChar(int i)
{
Log(i);
return _decorated.GetChar(i);
}
public long GetChars(int i, long fieldoffset, char[] buffer, int bufferoffset, int length)
{
Log();
return _decorated.GetChars(i, fieldoffset, buffer, bufferoffset, length);
}
public Guid GetGuid(int i)
{
Log(i);
return _decorated.GetGuid(i);
}
public short GetInt16(int i)
{
Log(i);
return _decorated.GetInt16(i);
}
public int GetInt32(int i)
{
Log(i);
return _decorated.GetInt32(i);
}
public long GetInt64(int i)
{
Log(i);
return _decorated.GetInt64(i);
}
public float GetFloat(int i)
{
Log(i);
return _decorated.GetFloat(i);
}
public double GetDouble(int i)
{
Log(i);
return _decorated.GetDouble(i);
}
public string GetString(int i)
{
Log(i);
return _decorated.GetString(i);
}
private void Log([CallerMemberName] string name = null)
{
Console.WriteLine(name);
}
private void Log(object arg, [CallerMemberName] string name = null)
{
Console.WriteLine($"{name}({arg})");
}
public decimal GetDecimal(int i)
{
Log(i);
return _decorated.GetDecimal(i);
}
public DateTime GetDateTime(int i)
{
Log(i);
return _decorated.GetDateTime(i);
}
public IDataReader GetData(int i)
{
Log(i);
return _decorated.GetData(i);
}
public bool IsDBNull(int i)
{
Log(i);
return _decorated.IsDBNull(i);
}
public int FieldCount
{
get
{
Log();
return _decorated.FieldCount;
}
}
object IDataRecord.this[int i]
{
get
{
Log(i);
return _decorated[i];
}
}
object IDataRecord.this[string name]
{
get
{
Log((object)name);
return _decorated[name];
}
}
public void Close()
{
Log();
_decorated.Close();
}
public DataTable GetSchemaTable()
{
Log();
return _decorated.GetSchemaTable();
}
public bool NextResult()
{
Log();
return _decorated.NextResult();
}
public bool Read()
{
Log();
return _decorated.Read();
}
public int Depth
{
get
{
Log();
return _decorated.Depth;
}
}
public bool IsClosed
{
get
{
Log();
return _decorated.IsClosed;
}
}
public int RecordsAffected
{
get
{
Log();
return _decorated.RecordsAffected;
}
}
}
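/// <summary>
/// Illustrative usage sketch (not part of the original source): wraps an existing
/// IDataReader so that every member access is logged to the console by the decorator
/// above. The reader passed in is assumed to come from an already executed command.
/// </summary>
static class DataReaderDecoratorExample
{
    public static void DumpFirstColumn(IDataReader reader)
    {
        // Disposing the decorator also disposes the wrapped reader (see Dispose above).
        using (IDataReader logged = new DataReaderDecorator(reader))
        {
            while (logged.Read())
                Console.WriteLine(logged.GetValue(0));
        }
    }
}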
}
| |
#if !BESTHTTP_DISABLE_ALTERNATE_SSL && (!UNITY_WEBGL || UNITY_EDITOR)
using System;
using System.Collections;
using System.IO;
using Org.BouncyCastle.Asn1;
using Org.BouncyCastle.Asn1.CryptoPro;
using Org.BouncyCastle.Asn1.Nist;
using Org.BouncyCastle.Asn1.Pkcs;
using Org.BouncyCastle.Asn1.TeleTrust;
using Org.BouncyCastle.Asn1.X509;
using Org.BouncyCastle.Asn1.X9;
using Org.BouncyCastle.Security;
using Org.BouncyCastle.Crypto.Digests;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Crypto.Engines;
using Org.BouncyCastle.Crypto.Signers;
using Org.BouncyCastle.Utilities;
namespace Org.BouncyCastle.Security
{
/// <summary>
/// Signer utility class containing methods that cannot be specifically grouped into other classes.
/// </summary>
public sealed class SignerUtilities
{
private SignerUtilities()
{
}
internal static readonly IDictionary algorithms = Platform.CreateHashtable();
internal static readonly IDictionary oids = Platform.CreateHashtable();
static SignerUtilities()
{
algorithms["MD2WITHRSA"] = "MD2withRSA";
algorithms["MD2WITHRSAENCRYPTION"] = "MD2withRSA";
algorithms[PkcsObjectIdentifiers.MD2WithRsaEncryption.Id] = "MD2withRSA";
algorithms["MD4WITHRSA"] = "MD4withRSA";
algorithms["MD4WITHRSAENCRYPTION"] = "MD4withRSA";
algorithms[PkcsObjectIdentifiers.MD4WithRsaEncryption.Id] = "MD4withRSA";
algorithms["MD5WITHRSA"] = "MD5withRSA";
algorithms["MD5WITHRSAENCRYPTION"] = "MD5withRSA";
algorithms[PkcsObjectIdentifiers.MD5WithRsaEncryption.Id] = "MD5withRSA";
algorithms["SHA1WITHRSA"] = "SHA-1withRSA";
algorithms["SHA1WITHRSAENCRYPTION"] = "SHA-1withRSA";
algorithms[PkcsObjectIdentifiers.Sha1WithRsaEncryption.Id] = "SHA-1withRSA";
algorithms["SHA-1WITHRSA"] = "SHA-1withRSA";
algorithms["SHA224WITHRSA"] = "SHA-224withRSA";
algorithms["SHA224WITHRSAENCRYPTION"] = "SHA-224withRSA";
algorithms[PkcsObjectIdentifiers.Sha224WithRsaEncryption.Id] = "SHA-224withRSA";
algorithms["SHA-224WITHRSA"] = "SHA-224withRSA";
algorithms["SHA256WITHRSA"] = "SHA-256withRSA";
algorithms["SHA256WITHRSAENCRYPTION"] = "SHA-256withRSA";
algorithms[PkcsObjectIdentifiers.Sha256WithRsaEncryption.Id] = "SHA-256withRSA";
algorithms["SHA-256WITHRSA"] = "SHA-256withRSA";
algorithms["SHA384WITHRSA"] = "SHA-384withRSA";
algorithms["SHA384WITHRSAENCRYPTION"] = "SHA-384withRSA";
algorithms[PkcsObjectIdentifiers.Sha384WithRsaEncryption.Id] = "SHA-384withRSA";
algorithms["SHA-384WITHRSA"] = "SHA-384withRSA";
algorithms["SHA512WITHRSA"] = "SHA-512withRSA";
algorithms["SHA512WITHRSAENCRYPTION"] = "SHA-512withRSA";
algorithms[PkcsObjectIdentifiers.Sha512WithRsaEncryption.Id] = "SHA-512withRSA";
algorithms["SHA-512WITHRSA"] = "SHA-512withRSA";
algorithms["PSSWITHRSA"] = "PSSwithRSA";
algorithms["RSASSA-PSS"] = "PSSwithRSA";
algorithms[PkcsObjectIdentifiers.IdRsassaPss.Id] = "PSSwithRSA";
algorithms["RSAPSS"] = "PSSwithRSA";
algorithms["SHA1WITHRSAANDMGF1"] = "SHA-1withRSAandMGF1";
algorithms["SHA-1WITHRSAANDMGF1"] = "SHA-1withRSAandMGF1";
algorithms["SHA1WITHRSA/PSS"] = "SHA-1withRSAandMGF1";
algorithms["SHA-1WITHRSA/PSS"] = "SHA-1withRSAandMGF1";
algorithms["SHA224WITHRSAANDMGF1"] = "SHA-224withRSAandMGF1";
algorithms["SHA-224WITHRSAANDMGF1"] = "SHA-224withRSAandMGF1";
algorithms["SHA224WITHRSA/PSS"] = "SHA-224withRSAandMGF1";
algorithms["SHA-224WITHRSA/PSS"] = "SHA-224withRSAandMGF1";
algorithms["SHA256WITHRSAANDMGF1"] = "SHA-256withRSAandMGF1";
algorithms["SHA-256WITHRSAANDMGF1"] = "SHA-256withRSAandMGF1";
algorithms["SHA256WITHRSA/PSS"] = "SHA-256withRSAandMGF1";
algorithms["SHA-256WITHRSA/PSS"] = "SHA-256withRSAandMGF1";
algorithms["SHA384WITHRSAANDMGF1"] = "SHA-384withRSAandMGF1";
algorithms["SHA-384WITHRSAANDMGF1"] = "SHA-384withRSAandMGF1";
algorithms["SHA384WITHRSA/PSS"] = "SHA-384withRSAandMGF1";
algorithms["SHA-384WITHRSA/PSS"] = "SHA-384withRSAandMGF1";
algorithms["SHA512WITHRSAANDMGF1"] = "SHA-512withRSAandMGF1";
algorithms["SHA-512WITHRSAANDMGF1"] = "SHA-512withRSAandMGF1";
algorithms["SHA512WITHRSA/PSS"] = "SHA-512withRSAandMGF1";
algorithms["SHA-512WITHRSA/PSS"] = "SHA-512withRSAandMGF1";
algorithms["RIPEMD128WITHRSA"] = "RIPEMD128withRSA";
algorithms["RIPEMD128WITHRSAENCRYPTION"] = "RIPEMD128withRSA";
algorithms[TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD128.Id] = "RIPEMD128withRSA";
algorithms["RIPEMD160WITHRSA"] = "RIPEMD160withRSA";
algorithms["RIPEMD160WITHRSAENCRYPTION"] = "RIPEMD160withRSA";
algorithms[TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD160.Id] = "RIPEMD160withRSA";
algorithms["RIPEMD256WITHRSA"] = "RIPEMD256withRSA";
algorithms["RIPEMD256WITHRSAENCRYPTION"] = "RIPEMD256withRSA";
algorithms[TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD256.Id] = "RIPEMD256withRSA";
algorithms["NONEWITHRSA"] = "RSA";
algorithms["RSAWITHNONE"] = "RSA";
algorithms["RAWRSA"] = "RSA";
algorithms["RAWRSAPSS"] = "RAWRSASSA-PSS";
algorithms["NONEWITHRSAPSS"] = "RAWRSASSA-PSS";
algorithms["NONEWITHRSASSA-PSS"] = "RAWRSASSA-PSS";
algorithms["NONEWITHDSA"] = "NONEwithDSA";
algorithms["DSAWITHNONE"] = "NONEwithDSA";
algorithms["RAWDSA"] = "NONEwithDSA";
algorithms["DSA"] = "SHA-1withDSA";
algorithms["DSAWITHSHA1"] = "SHA-1withDSA";
algorithms["DSAWITHSHA-1"] = "SHA-1withDSA";
algorithms["SHA/DSA"] = "SHA-1withDSA";
algorithms["SHA1/DSA"] = "SHA-1withDSA";
algorithms["SHA-1/DSA"] = "SHA-1withDSA";
algorithms["SHA1WITHDSA"] = "SHA-1withDSA";
algorithms["SHA-1WITHDSA"] = "SHA-1withDSA";
algorithms[X9ObjectIdentifiers.IdDsaWithSha1.Id] = "SHA-1withDSA";
algorithms["DSAWITHSHA224"] = "SHA-224withDSA";
algorithms["DSAWITHSHA-224"] = "SHA-224withDSA";
algorithms["SHA224/DSA"] = "SHA-224withDSA";
algorithms["SHA-224/DSA"] = "SHA-224withDSA";
algorithms["SHA224WITHDSA"] = "SHA-224withDSA";
algorithms["SHA-224WITHDSA"] = "SHA-224withDSA";
algorithms[NistObjectIdentifiers.DsaWithSha224.Id] = "SHA-224withDSA";
algorithms["DSAWITHSHA256"] = "SHA-256withDSA";
algorithms["DSAWITHSHA-256"] = "SHA-256withDSA";
algorithms["SHA256/DSA"] = "SHA-256withDSA";
algorithms["SHA-256/DSA"] = "SHA-256withDSA";
algorithms["SHA256WITHDSA"] = "SHA-256withDSA";
algorithms["SHA-256WITHDSA"] = "SHA-256withDSA";
algorithms[NistObjectIdentifiers.DsaWithSha256.Id] = "SHA-256withDSA";
algorithms["DSAWITHSHA384"] = "SHA-384withDSA";
algorithms["DSAWITHSHA-384"] = "SHA-384withDSA";
algorithms["SHA384/DSA"] = "SHA-384withDSA";
algorithms["SHA-384/DSA"] = "SHA-384withDSA";
algorithms["SHA384WITHDSA"] = "SHA-384withDSA";
algorithms["SHA-384WITHDSA"] = "SHA-384withDSA";
algorithms[NistObjectIdentifiers.DsaWithSha384.Id] = "SHA-384withDSA";
algorithms["DSAWITHSHA512"] = "SHA-512withDSA";
algorithms["DSAWITHSHA-512"] = "SHA-512withDSA";
algorithms["SHA512/DSA"] = "SHA-512withDSA";
algorithms["SHA-512/DSA"] = "SHA-512withDSA";
algorithms["SHA512WITHDSA"] = "SHA-512withDSA";
algorithms["SHA-512WITHDSA"] = "SHA-512withDSA";
algorithms[NistObjectIdentifiers.DsaWithSha512.Id] = "SHA-512withDSA";
algorithms["NONEWITHECDSA"] = "NONEwithECDSA";
algorithms["ECDSAWITHNONE"] = "NONEwithECDSA";
algorithms["ECDSA"] = "SHA-1withECDSA";
algorithms["SHA1/ECDSA"] = "SHA-1withECDSA";
algorithms["SHA-1/ECDSA"] = "SHA-1withECDSA";
algorithms["ECDSAWITHSHA1"] = "SHA-1withECDSA";
algorithms["ECDSAWITHSHA-1"] = "SHA-1withECDSA";
algorithms["SHA1WITHECDSA"] = "SHA-1withECDSA";
algorithms["SHA-1WITHECDSA"] = "SHA-1withECDSA";
algorithms[X9ObjectIdentifiers.ECDsaWithSha1.Id] = "SHA-1withECDSA";
algorithms[TeleTrusTObjectIdentifiers.ECSignWithSha1.Id] = "SHA-1withECDSA";
algorithms["SHA224/ECDSA"] = "SHA-224withECDSA";
algorithms["SHA-224/ECDSA"] = "SHA-224withECDSA";
algorithms["ECDSAWITHSHA224"] = "SHA-224withECDSA";
algorithms["ECDSAWITHSHA-224"] = "SHA-224withECDSA";
algorithms["SHA224WITHECDSA"] = "SHA-224withECDSA";
algorithms["SHA-224WITHECDSA"] = "SHA-224withECDSA";
algorithms[X9ObjectIdentifiers.ECDsaWithSha224.Id] = "SHA-224withECDSA";
algorithms["SHA256/ECDSA"] = "SHA-256withECDSA";
algorithms["SHA-256/ECDSA"] = "SHA-256withECDSA";
algorithms["ECDSAWITHSHA256"] = "SHA-256withECDSA";
algorithms["ECDSAWITHSHA-256"] = "SHA-256withECDSA";
algorithms["SHA256WITHECDSA"] = "SHA-256withECDSA";
algorithms["SHA-256WITHECDSA"] = "SHA-256withECDSA";
algorithms[X9ObjectIdentifiers.ECDsaWithSha256.Id] = "SHA-256withECDSA";
algorithms["SHA384/ECDSA"] = "SHA-384withECDSA";
algorithms["SHA-384/ECDSA"] = "SHA-384withECDSA";
algorithms["ECDSAWITHSHA384"] = "SHA-384withECDSA";
algorithms["ECDSAWITHSHA-384"] = "SHA-384withECDSA";
algorithms["SHA384WITHECDSA"] = "SHA-384withECDSA";
algorithms["SHA-384WITHECDSA"] = "SHA-384withECDSA";
algorithms[X9ObjectIdentifiers.ECDsaWithSha384.Id] = "SHA-384withECDSA";
algorithms["SHA512/ECDSA"] = "SHA-512withECDSA";
algorithms["SHA-512/ECDSA"] = "SHA-512withECDSA";
algorithms["ECDSAWITHSHA512"] = "SHA-512withECDSA";
algorithms["ECDSAWITHSHA-512"] = "SHA-512withECDSA";
algorithms["SHA512WITHECDSA"] = "SHA-512withECDSA";
algorithms["SHA-512WITHECDSA"] = "SHA-512withECDSA";
algorithms[X9ObjectIdentifiers.ECDsaWithSha512.Id] = "SHA-512withECDSA";
algorithms["RIPEMD160/ECDSA"] = "RIPEMD160withECDSA";
algorithms["ECDSAWITHRIPEMD160"] = "RIPEMD160withECDSA";
algorithms["RIPEMD160WITHECDSA"] = "RIPEMD160withECDSA";
algorithms[TeleTrusTObjectIdentifiers.ECSignWithRipeMD160.Id] = "RIPEMD160withECDSA";
algorithms["GOST-3410"] = "GOST3410";
algorithms["GOST-3410-94"] = "GOST3410";
algorithms["GOST3411WITHGOST3410"] = "GOST3410";
algorithms[CryptoProObjectIdentifiers.GostR3411x94WithGostR3410x94.Id] = "GOST3410";
algorithms["ECGOST-3410"] = "ECGOST3410";
algorithms["ECGOST-3410-2001"] = "ECGOST3410";
algorithms["GOST3411WITHECGOST3410"] = "ECGOST3410";
algorithms[CryptoProObjectIdentifiers.GostR3411x94WithGostR3410x2001.Id] = "ECGOST3410";
oids["MD2withRSA"] = PkcsObjectIdentifiers.MD2WithRsaEncryption;
oids["MD4withRSA"] = PkcsObjectIdentifiers.MD4WithRsaEncryption;
oids["MD5withRSA"] = PkcsObjectIdentifiers.MD5WithRsaEncryption;
oids["SHA-1withRSA"] = PkcsObjectIdentifiers.Sha1WithRsaEncryption;
oids["SHA-224withRSA"] = PkcsObjectIdentifiers.Sha224WithRsaEncryption;
oids["SHA-256withRSA"] = PkcsObjectIdentifiers.Sha256WithRsaEncryption;
oids["SHA-384withRSA"] = PkcsObjectIdentifiers.Sha384WithRsaEncryption;
oids["SHA-512withRSA"] = PkcsObjectIdentifiers.Sha512WithRsaEncryption;
oids["PSSwithRSA"] = PkcsObjectIdentifiers.IdRsassaPss;
oids["SHA-1withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
oids["SHA-224withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
oids["SHA-256withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
oids["SHA-384withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
oids["SHA-512withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
oids["RIPEMD128withRSA"] = TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD128;
oids["RIPEMD160withRSA"] = TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD160;
oids["RIPEMD256withRSA"] = TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD256;
oids["SHA-1withDSA"] = X9ObjectIdentifiers.IdDsaWithSha1;
oids["SHA-1withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha1;
oids["SHA-224withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha224;
oids["SHA-256withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha256;
oids["SHA-384withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha384;
oids["SHA-512withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha512;
oids["GOST3410"] = CryptoProObjectIdentifiers.GostR3411x94WithGostR3410x94;
oids["ECGOST3410"] = CryptoProObjectIdentifiers.GostR3411x94WithGostR3410x2001;
}
/// <summary>
/// Returns an ObjectIdentifier for a given encoding.
/// </summary>
/// <param name="mechanism">A string representation of the encoding.</param>
/// <returns>A DerObjectIdentifier, null if the OID is not available.</returns>
// TODO Don't really want to support this
public static DerObjectIdentifier GetObjectIdentifier(
string mechanism)
{
if (mechanism == null)
throw new ArgumentNullException("mechanism");
mechanism = Platform.ToUpperInvariant(mechanism);
string aliased = (string) algorithms[mechanism];
if (aliased != null)
mechanism = aliased;
return (DerObjectIdentifier) oids[mechanism];
}
public static ICollection Algorithms
{
get { return oids.Keys; }
}
public static Asn1Encodable GetDefaultX509Parameters(
DerObjectIdentifier id)
{
return GetDefaultX509Parameters(id.Id);
}
public static Asn1Encodable GetDefaultX509Parameters(
string algorithm)
{
if (algorithm == null)
throw new ArgumentNullException("algorithm");
algorithm = Platform.ToUpperInvariant(algorithm);
string mechanism = (string) algorithms[algorithm];
if (mechanism == null)
mechanism = algorithm;
if (mechanism == "PSSwithRSA")
{
// TODO The Sha1Digest here is a default. In JCE version, the actual digest
// to be used can be overridden by subsequent parameter settings.
return GetPssX509Parameters("SHA-1");
}
if (mechanism.EndsWith("withRSAandMGF1"))
{
string digestName = mechanism.Substring(0, mechanism.Length - "withRSAandMGF1".Length);
return GetPssX509Parameters(digestName);
}
return DerNull.Instance;
}
private static Asn1Encodable GetPssX509Parameters(
string digestName)
{
AlgorithmIdentifier hashAlgorithm = new AlgorithmIdentifier(
DigestUtilities.GetObjectIdentifier(digestName), DerNull.Instance);
// TODO Is it possible for the MGF hash alg to be different from the PSS one?
AlgorithmIdentifier maskGenAlgorithm = new AlgorithmIdentifier(
PkcsObjectIdentifiers.IdMgf1, hashAlgorithm);
int saltLen = DigestUtilities.GetDigest(digestName).GetDigestSize();
return new RsassaPssParameters(hashAlgorithm, maskGenAlgorithm,
new DerInteger(saltLen), new DerInteger(1));
}
public static ISigner GetSigner(
DerObjectIdentifier id)
{
return GetSigner(id.Id);
}
public static ISigner GetSigner(
string algorithm)
{
if (algorithm == null)
throw new ArgumentNullException("algorithm");
algorithm = Platform.ToUpperInvariant(algorithm);
string mechanism = (string) algorithms[algorithm];
if (mechanism == null)
mechanism = algorithm;
if (mechanism.Equals("RSA"))
{
return (new RsaDigestSigner(new NullDigest(), (AlgorithmIdentifier)null));
}
if (mechanism.Equals("MD2withRSA"))
{
return (new RsaDigestSigner(new MD2Digest()));
}
if (mechanism.Equals("MD4withRSA"))
{
return (new RsaDigestSigner(new MD4Digest()));
}
if (mechanism.Equals("MD5withRSA"))
{
return (new RsaDigestSigner(new MD5Digest()));
}
if (mechanism.Equals("SHA-1withRSA"))
{
return (new RsaDigestSigner(new Sha1Digest()));
}
if (mechanism.Equals("SHA-224withRSA"))
{
return (new RsaDigestSigner(new Sha224Digest()));
}
if (mechanism.Equals("SHA-256withRSA"))
{
return (new RsaDigestSigner(new Sha256Digest()));
}
if (mechanism.Equals("SHA-384withRSA"))
{
return (new RsaDigestSigner(new Sha384Digest()));
}
if (mechanism.Equals("SHA-512withRSA"))
{
return (new RsaDigestSigner(new Sha512Digest()));
}
if (mechanism.Equals("RIPEMD128withRSA"))
{
return (new RsaDigestSigner(new RipeMD128Digest()));
}
if (mechanism.Equals("RIPEMD160withRSA"))
{
return (new RsaDigestSigner(new RipeMD160Digest()));
}
if (mechanism.Equals("RIPEMD256withRSA"))
{
return (new RsaDigestSigner(new RipeMD256Digest()));
}
if (mechanism.Equals("RAWRSASSA-PSS"))
{
// TODO Add support for other parameter settings
return PssSigner.CreateRawSigner(new RsaBlindedEngine(), new Sha1Digest());
}
if (mechanism.Equals("PSSwithRSA"))
{
// TODO The Sha1Digest here is a default. In JCE version, the actual digest
// to be used can be overridden by subsequent parameter settings.
return (new PssSigner(new RsaBlindedEngine(), new Sha1Digest()));
}
if (mechanism.Equals("SHA-1withRSAandMGF1"))
{
return (new PssSigner(new RsaBlindedEngine(), new Sha1Digest()));
}
if (mechanism.Equals("SHA-224withRSAandMGF1"))
{
return (new PssSigner(new RsaBlindedEngine(), new Sha224Digest()));
}
if (mechanism.Equals("SHA-256withRSAandMGF1"))
{
return (new PssSigner(new RsaBlindedEngine(), new Sha256Digest()));
}
if (mechanism.Equals("SHA-384withRSAandMGF1"))
{
return (new PssSigner(new RsaBlindedEngine(), new Sha384Digest()));
}
if (mechanism.Equals("SHA-512withRSAandMGF1"))
{
return (new PssSigner(new RsaBlindedEngine(), new Sha512Digest()));
}
if (mechanism.Equals("NONEwithDSA"))
{
return (new DsaDigestSigner(new DsaSigner(), new NullDigest()));
}
if (mechanism.Equals("SHA-1withDSA"))
{
return (new DsaDigestSigner(new DsaSigner(), new Sha1Digest()));
}
if (mechanism.Equals("SHA-224withDSA"))
{
return (new DsaDigestSigner(new DsaSigner(), new Sha224Digest()));
}
if (mechanism.Equals("SHA-256withDSA"))
{
return (new DsaDigestSigner(new DsaSigner(), new Sha256Digest()));
}
if (mechanism.Equals("SHA-384withDSA"))
{
return (new DsaDigestSigner(new DsaSigner(), new Sha384Digest()));
}
if (mechanism.Equals("SHA-512withDSA"))
{
return (new DsaDigestSigner(new DsaSigner(), new Sha512Digest()));
}
if (mechanism.Equals("NONEwithECDSA"))
{
return (new DsaDigestSigner(new ECDsaSigner(), new NullDigest()));
}
if (mechanism.Equals("SHA-1withECDSA"))
{
return (new DsaDigestSigner(new ECDsaSigner(), new Sha1Digest()));
}
if (mechanism.Equals("SHA-224withECDSA"))
{
return (new DsaDigestSigner(new ECDsaSigner(), new Sha224Digest()));
}
if (mechanism.Equals("SHA-256withECDSA"))
{
return (new DsaDigestSigner(new ECDsaSigner(), new Sha256Digest()));
}
if (mechanism.Equals("SHA-384withECDSA"))
{
return (new DsaDigestSigner(new ECDsaSigner(), new Sha384Digest()));
}
if (mechanism.Equals("SHA-512withECDSA"))
{
return (new DsaDigestSigner(new ECDsaSigner(), new Sha512Digest()));
}
if (mechanism.Equals("RIPEMD160withECDSA"))
{
return (new DsaDigestSigner(new ECDsaSigner(), new RipeMD160Digest()));
}
if (mechanism.Equals("SHA1WITHECNR"))
{
return (new DsaDigestSigner(new ECNRSigner(), new Sha1Digest()));
}
if (mechanism.Equals("SHA224WITHECNR"))
{
return (new DsaDigestSigner(new ECNRSigner(), new Sha224Digest()));
}
if (mechanism.Equals("SHA256WITHECNR"))
{
return (new DsaDigestSigner(new ECNRSigner(), new Sha256Digest()));
}
if (mechanism.Equals("SHA384WITHECNR"))
{
return (new DsaDigestSigner(new ECNRSigner(), new Sha384Digest()));
}
if (mechanism.Equals("SHA512WITHECNR"))
{
return (new DsaDigestSigner(new ECNRSigner(), new Sha512Digest()));
}
if (mechanism.Equals("GOST3410"))
{
return new Gost3410DigestSigner(new Gost3410Signer(), new Gost3411Digest());
}
if (mechanism.Equals("ECGOST3410"))
{
return new Gost3410DigestSigner(new ECGost3410Signer(), new Gost3411Digest());
}
if (mechanism.Equals("SHA1WITHRSA/ISO9796-2"))
{
return new Iso9796d2Signer(new RsaBlindedEngine(), new Sha1Digest(), true);
}
if (mechanism.Equals("MD5WITHRSA/ISO9796-2"))
{
return new Iso9796d2Signer(new RsaBlindedEngine(), new MD5Digest(), true);
}
if (mechanism.Equals("RIPEMD160WITHRSA/ISO9796-2"))
{
return new Iso9796d2Signer(new RsaBlindedEngine(), new RipeMD160Digest(), true);
}
if (mechanism.EndsWith("/X9.31"))
{
string x931 = mechanism.Substring(0, mechanism.Length - "/X9.31".Length);
int withPos = x931.IndexOf("WITH");
if (withPos > 0)
{
int endPos = withPos + "WITH".Length;
string digestName = x931.Substring(0, withPos);
IDigest digest = DigestUtilities.GetDigest(digestName);
string cipherName = x931.Substring(endPos, x931.Length - endPos);
if (cipherName.Equals("RSA"))
{
IAsymmetricBlockCipher cipher = new RsaBlindedEngine();
return new X931Signer(cipher, digest);
}
}
}
throw new SecurityUtilityException("Signer " + algorithm + " not recognised.");
}
public static string GetEncodingName(
DerObjectIdentifier oid)
{
return (string) algorithms[oid.Id];
}
}
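/// <summary>
/// Illustrative sketch (not part of the original source): resolving a signer by its
/// algorithm name and producing a signature. The private key parameter is assumed to
/// come from elsewhere (e.g. a key pair generator or a loaded keystore).
/// </summary>
internal static class SignerUtilitiesExample
{
    internal static byte[] Sign(string algorithm, ICipherParameters privateKey, byte[] data)
    {
        ISigner signer = SignerUtilities.GetSigner(algorithm); // e.g. "SHA-256withRSA"
        signer.Init(true, privateKey);                         // true = initialise for signing
        signer.BlockUpdate(data, 0, data.Length);
        return signer.GenerateSignature();
    }
}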
}
#endif
| |
using System;
using System.Collections;
using System.ComponentModel;
using System.Diagnostics;
using System.Windows.Threading;
using System.Runtime.InteropServices;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows;
using System.Windows.Automation.Peers;
using System.Windows.Input;
using System.Windows.Media;
using MS.Utility;
using MS.Win32;
using MS.Internal;
using MS.Internal.PresentationFramework; // SafeSecurityHelper
namespace System.Windows.Controls.Primitives
{
/// <summary>
/// The thumb control enables basic drag-movement functionality for scrollbars and window resizing widgets.
/// </summary>
/// <remarks>
/// The thumb can receive mouse focus but it cannot receive keyboard focus.
/// As well, there is no threshold at which the control stops firing its DragDeltaEvent.
/// Once in mouse capture, the DragDeltaEvent fires until the mouse button is released.
/// </remarks>
[DefaultEvent("DragDelta")]
[Localizability(LocalizationCategory.NeverLocalize)]
public class Thumb : Control
{
#region Constructors
/// <summary>
/// Default Thumb constructor
/// </summary>
/// <remarks>
/// Automatic determination of current Dispatcher. Use alternative constructor
/// that accepts a Dispatcher for best performance.
/// </remarks>
public Thumb() : base()
{
}
static Thumb()
{
// Register metadata for dependency properties
DefaultStyleKeyProperty.OverrideMetadata(typeof(Thumb), new FrameworkPropertyMetadata(typeof(Thumb)));
_dType = DependencyObjectType.FromSystemTypeInternal(typeof(Thumb));
FocusableProperty.OverrideMetadata(typeof(Thumb), new FrameworkPropertyMetadata(MS.Internal.KnownBoxes.BooleanBoxes.FalseBox));
EventManager.RegisterClassHandler(typeof(Thumb), Mouse.LostMouseCaptureEvent, new MouseEventHandler(OnLostMouseCapture));
IsEnabledProperty.OverrideMetadata(typeof(Thumb), new UIPropertyMetadata(new PropertyChangedCallback(OnVisualStatePropertyChanged)));
IsMouseOverPropertyKey.OverrideMetadata(typeof(Thumb), new UIPropertyMetadata(new PropertyChangedCallback(OnVisualStatePropertyChanged)));
}
#endregion
#region Properties and Events
/// <summary>
/// Event fires when the user presses the mouse's left button on the thumb.
/// </summary>
public static readonly RoutedEvent DragStartedEvent = EventManager.RegisterRoutedEvent("DragStarted", RoutingStrategy.Bubble, typeof(DragStartedEventHandler), typeof(Thumb));
/// <summary>
/// Event fires when the thumb is in a mouse capture state and the user moves the mouse around.
/// </summary>
public static readonly RoutedEvent DragDeltaEvent = EventManager.RegisterRoutedEvent("DragDelta", RoutingStrategy.Bubble, typeof(DragDeltaEventHandler), typeof(Thumb));
/// <summary>
/// Event fires when the user releases the mouse's left button or when the CancelDrag method is called.
/// </summary>
public static readonly RoutedEvent DragCompletedEvent = EventManager.RegisterRoutedEvent("DragCompleted", RoutingStrategy.Bubble, typeof(DragCompletedEventHandler), typeof(Thumb));
/// <summary>
/// Add / Remove DragStartedEvent handler
/// </summary>
[Category("Behavior")]
public event DragStartedEventHandler DragStarted { add { AddHandler(DragStartedEvent, value); } remove { RemoveHandler(DragStartedEvent, value); } }
/// <summary>
/// Add / Remove DragDeltaEvent handler
/// </summary>
[Category("Behavior")]
public event DragDeltaEventHandler DragDelta { add { AddHandler(DragDeltaEvent, value); } remove { RemoveHandler(DragDeltaEvent, value); } }
/// <summary>
/// Add / Remove DragCompletedEvent handler
/// </summary>
[Category("Behavior")]
public event DragCompletedEventHandler DragCompleted { add { AddHandler(DragCompletedEvent, value); } remove { RemoveHandler(DragCompletedEvent, value); } }
private static readonly DependencyPropertyKey IsDraggingPropertyKey =
DependencyProperty.RegisterReadOnly(
"IsDragging",
typeof(bool),
typeof(Thumb),
new FrameworkPropertyMetadata(
MS.Internal.KnownBoxes.BooleanBoxes.FalseBox,
new PropertyChangedCallback(OnIsDraggingPropertyChanged)));
/// <summary>
/// DependencyProperty for the IsDragging property.
/// Flags: None
/// Default Value: false
/// </summary>
public static readonly DependencyProperty IsDraggingProperty = IsDraggingPropertyKey.DependencyProperty;
/// <summary>
/// IsDragging indicates that the left mouse button is pressed over the thumb.
/// </summary>
[Bindable(true), Browsable(false), Category("Appearance")]
public bool IsDragging
{
get { return (bool) GetValue(IsDraggingProperty); }
protected set { SetValue(IsDraggingPropertyKey, MS.Internal.KnownBoxes.BooleanBoxes.Box(value)); }
}
#endregion Properties and Events
/// <summary>
/// Called when IsDraggingProperty is changed on "d."
/// </summary>
/// <param name="d">The object on which the property was changed.</param>
/// <param name="e">EventArgs that contains the old and new values for this property</param>
private static void OnIsDraggingPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
var thumb = (Thumb)d;
thumb.OnDraggingChanged(e);
thumb.UpdateVisualState();
}
#region Public methods
/// <summary>
/// This method cancels the dragging operation.
/// </summary>
public void CancelDrag()
{
if (IsDragging)
{
if (IsMouseCaptured)
{
ReleaseMouseCapture();
}
ClearValue(IsDraggingPropertyKey);
RaiseEvent(new DragCompletedEventArgs(_previousScreenCoordPosition.X - _originScreenCoordPosition.X, _previousScreenCoordPosition.Y - _originScreenCoordPosition.Y, true));
}
}
#endregion Public methods
#region Virtual methods
/// <summary>
/// This method is invoked when the IsDragging property changes.
/// </summary>
/// <param name="e">DependencyPropertyChangedEventArgs for IsDragging property.</param>
protected virtual void OnDraggingChanged(DependencyPropertyChangedEventArgs e)
{
}
#endregion Virtual methods
#region Override methods
/// <summary>
/// Change to the correct visual state for the ButtonBase.
/// </summary>
/// <param name="useTransitions">
/// true to use transitions when updating the visual state, false to
/// snap directly to the new visual state.
/// </param>
internal override void ChangeVisualState(bool useTransitions)
{
// See ButtonBase.ChangeVisualState.
// This method should be exactly like it, except we use IsDragging instead of IsPressed for the pressed state
if (!IsEnabled)
{
VisualStateManager.GoToState(this, VisualStates.StateDisabled, useTransitions);
}
else if (IsDragging)
{
VisualStateManager.GoToState(this, VisualStates.StatePressed, useTransitions);
}
else if (IsMouseOver)
{
VisualStateManager.GoToState(this, VisualStates.StateMouseOver, useTransitions);
}
else
{
VisualStateManager.GoToState(this, VisualStates.StateNormal, useTransitions);
}
if (IsKeyboardFocused)
{
VisualStateManager.GoToState(this, VisualStates.StateFocused, useTransitions);
}
else
{
VisualStateManager.GoToState(this, VisualStates.StateUnfocused, useTransitions);
}
base.ChangeVisualState(useTransitions);
}
/// <summary>
/// Creates AutomationPeer (<see cref="UIElement.OnCreateAutomationPeer"/>)
/// </summary>
protected override AutomationPeer OnCreateAutomationPeer()
{
return new ThumbAutomationPeer(this);
}
/// <summary>
/// This is the method that responds to the MouseButtonEvent event.
/// </summary>
/// <param name="e"></param>
protected override void OnMouseLeftButtonDown(MouseButtonEventArgs e)
{
if (!IsDragging)
{
e.Handled = true;
Focus();
CaptureMouse();
SetValue(IsDraggingPropertyKey, true);
_originThumbPoint = e.GetPosition(this);
_previousScreenCoordPosition = _originScreenCoordPosition = SafeSecurityHelper.ClientToScreen(this,_originThumbPoint);
bool exceptionThrown = true;
try
{
RaiseEvent(new DragStartedEventArgs(_originThumbPoint.X, _originThumbPoint.Y));
exceptionThrown = false;
}
finally
{
if (exceptionThrown)
{
CancelDrag();
}
}
}
else
{
// This is weird, Thumb shouldn't get a MouseLeftButtonDown event while dragging.
// It may be that something ate the MouseLeftButtonUp event, so Thumb never had a chance to
// reset the IsDragging property.
Debug.Assert(false,"Got MouseLeftButtonDown event while dragging!");
}
base.OnMouseLeftButtonDown(e);
}
/// <summary>
/// This is the method that responds to the MouseButtonEvent event.
/// </summary>
/// <param name="e"></param>
protected override void OnMouseLeftButtonUp(MouseButtonEventArgs e)
{
if (IsMouseCaptured && IsDragging)
{
e.Handled = true;
ClearValue(IsDraggingPropertyKey);
ReleaseMouseCapture();
Point pt = SafeSecurityHelper.ClientToScreen(this, e.MouseDevice.GetPosition(this));
RaiseEvent(new DragCompletedEventArgs(pt.X - _originScreenCoordPosition.X, pt.Y - _originScreenCoordPosition.Y, false));
}
base.OnMouseLeftButtonUp(e);
}
// Cancel Drag if we lost capture
private static void OnLostMouseCapture(object sender, MouseEventArgs e)
{
Thumb thumb = (Thumb)sender;
if (Mouse.Captured != thumb)
{
thumb.CancelDrag();
}
}
/// <summary>
/// This is the method that responds to the MouseEvent event.
/// </summary>
/// <param name="e"></param>
protected override void OnMouseMove(MouseEventArgs e)
{
base.OnMouseMove(e);
if (IsDragging)
{
if (e.MouseDevice.LeftButton == MouseButtonState.Pressed)
{
Point thumbCoordPosition = e.GetPosition(this);
// Get client point then convert to screen point
Point screenCoordPosition = SafeSecurityHelper.ClientToScreen(this, thumbCoordPosition);
// We will fire DragDelta event only when the mouse is really moved
if (screenCoordPosition != _previousScreenCoordPosition)
{
_previousScreenCoordPosition = screenCoordPosition;
e.Handled = true;
RaiseEvent(new DragDeltaEventArgs(thumbCoordPosition.X - _originThumbPoint.X,
thumbCoordPosition.Y - _originThumbPoint.Y));
}
}
else
{
if (e.MouseDevice.Captured == this)
ReleaseMouseCapture();
ClearValue(IsDraggingPropertyKey);
_originThumbPoint.X = 0;
_originThumbPoint.Y = 0;
}
}
}
//
// This property
// 1. Finds the correct initial size for the _effectiveValues store on the current DependencyObject
// 2. This is a performance optimization
//
internal override int EffectiveValuesInitialSize
{
get { return 19; }
}
#endregion
#region Data
/// <summary>
/// The point where the mouse was clicked down (Thumb's co-ordinate).
/// </summary>
private Point _originThumbPoint; //
/// <summary>
/// The position of the mouse (screen co-ordinate) where the mouse was clicked down.
/// </summary>
private Point _originScreenCoordPosition;
/// <summary>
/// The position of the mouse (screen co-ordinate) when the previous DragDelta event was fired
/// </summary>
private Point _previousScreenCoordPosition;
#endregion
#region DTypeThemeStyleKey
// Returns the DependencyObjectType for the registered ThemeStyleKey's default
// value. Controls will override this method to return appropriate types.
internal override DependencyObjectType DTypeThemeStyleKey
{
get { return _dType; }
}
private static DependencyObjectType _dType;
#endregion DTypeThemeStyleKey
}
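// Illustrative sketch (not part of the original source): wiring the drag events up from
// application code to move a Thumb placed on a Canvas. "myThumb" is a placeholder for a
// Thumb instance whose Canvas.Left/Canvas.Top have been initialised.
//
//   myThumb.DragDelta += (sender, e) =>
//   {
//       Canvas.SetLeft(myThumb, Canvas.GetLeft(myThumb) + e.HorizontalChange);
//       Canvas.SetTop(myThumb, Canvas.GetTop(myThumb) + e.VerticalChange);
//   };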
}
| |
/************************************************************************************
Copyright : Copyright 2014 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.3 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
http://www.oculus.com/licenses/LICENSE-3.3
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System;
using System.Collections;
using System.Runtime.InteropServices;
using VR = UnityEngine.VR;
/// <summary>
/// Add OVROverlay script to an object with an optional mesh primitive
/// rendered as a TimeWarp overlay instead by drawing it into the eye buffer.
/// This will take full advantage of the display resolution and avoid double
/// resampling of the texture.
///
/// If the texture is dynamically generated, as for an interactive GUI or
/// animation, it must be explicitly triple buffered to avoid flickering
/// when it is referenced asynchronously by TimeWarp; see OVRRTOverlayConnector.cs for the triple-buffer design.
///
/// We support 3 types of Overlay shapes right now
/// 1. Quad: The most common overlay type; you render a quad in TimeWarp space.
/// 2. Cylinder: [Mobile Only][Experimental], Display overlay as partial surface of a cylinder
/// * The cylinder's center will be your game object's center
/// * We encoded the cylinder's parameters in transform.scale,
/// **[scale.z] is the radius of the cylinder
/// **[scale.y] is the height of the cylinder
/// **[scale.x] is the length of the arc of cylinder
/// * Limitations
/// **Only half of the cylinder can be displayed, which means the arc angle has to be smaller than 180 degrees: [scale.x] / [scale.z] <= PI
/// **Your camera has to be inside the inscribed sphere of the cylinder; the overlay will be faded out automatically when the camera gets close to the inscribed sphere's surface.
/// **Translation only works correctly with vrDriver 1.04 or above
/// 3. Cubemap: [Mobile Only], Display overlay as a cube map
/// </summary>
public class OVROverlay : MonoBehaviour
{
public enum OverlayShape
{
Quad = 0, // Display overlay as a quad
Cylinder = 1, // [Mobile Only][Experimental] Display overlay as a cylinder, Translation only works correctly with vrDriver 1.04 or above
Cubemap = 2, // [Mobile Only] Display overlay as a cube map
}
public enum OverlayType
{
None, // Disables the overlay
Underlay, // Eye buffers blend on top
Overlay, // Blends on top of the eye buffer
OverlayShowLod // (Deprecated) Blends on top and colorizes texture level of detail
};
#if UNITY_ANDROID && !UNITY_EDITOR
const int maxInstances = 3;
#else
const int maxInstances = 15;
#endif
static OVROverlay[] instances = new OVROverlay[maxInstances];
/// <summary>
/// Specify overlay's type
/// </summary>
public OverlayType currentOverlayType = OverlayType.Overlay;
/// <summary>
/// Specify overlay's shape
/// </summary>
public OverlayShape currentOverlayShape = OverlayShape.Quad;
/// <summary>
/// Try to avoid setting the texture frequently while the app is running; updating texNativePtr is slow because of rendering-thread synchronization.
/// Please cache your nativeTexturePtr and use OverrideOverlayTextureInfo
/// </summary>
public Texture[] textures = new Texture[] { null, null };
private Texture[] cachedTextures = new Texture[] { null, null };
private IntPtr[] texNativePtrs = new IntPtr[] { IntPtr.Zero, IntPtr.Zero };
private int layerIndex = -1;
Renderer rend;
/// <summary>
/// Use this function to set the texture and texNativePtr while the app is running.
/// GetNativeTexturePtr is slow; the value should be pre-cached.
/// </summary>
public void OverrideOverlayTextureInfo(Texture srcTexture, IntPtr nativePtr, VR.VRNode node)
{
int index = (node == VR.VRNode.RightEye) ? 1 : 0;
textures[index] = srcTexture;
cachedTextures[index] = srcTexture;
texNativePtrs[index] = nativePtr;
}
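// Illustrative sketch (not part of the original source): pre-caching the native pointer once
// and handing it to the overlay whenever the render texture changes. "rt" is a placeholder
// RenderTexture created elsewhere; querying GetNativeTexturePtr() every frame would be slow.
//
//   IntPtr cachedPtr = rt.GetNativeTexturePtr();
//   overlay.OverrideOverlayTextureInfo(rt, cachedPtr, VR.VRNode.LeftEye);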
void Awake()
{
Debug.Log("Overlay Awake");
rend = GetComponent<Renderer>();
for (int i = 0; i < 2; ++i)
{
// Backward compatibility
if (rend != null && textures[i] == null)
textures[i] = rend.material.mainTexture;
if (textures[i] != null)
{
cachedTextures[i] = textures[i];
texNativePtrs[i] = textures[i].GetNativeTexturePtr();
}
}
}
void OnEnable()
{
if (!OVRManager.isHmdPresent)
{
enabled = false;
return;
}
OnDisable();
for (int i = 0; i < maxInstances; ++i)
{
if (instances[i] == null || instances[i] == this)
{
layerIndex = i;
instances[i] = this;
break;
}
}
}
void OnDisable()
{
if (layerIndex != -1)
{
// Turn off the overlay if it was on.
OVRPlugin.SetOverlayQuad(true, false, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, OVRPose.identity.ToPosef(), Vector3.one.ToVector3f(), layerIndex);
instances[layerIndex] = null;
}
layerIndex = -1;
}
void OnRenderObject()
{
// The overlay must be specified every eye frame, because it is positioned relative to the
// current head location. If frames are dropped, it will be time warped appropriately,
// just like the eye buffers.
if (!Camera.current.CompareTag("MainCamera") || Camera.current.cameraType != CameraType.Game || layerIndex == -1 || currentOverlayType == OverlayType.None)
return;
#if !UNITY_ANDROID || UNITY_EDITOR
if (currentOverlayShape == OverlayShape.Cylinder)
{
Debug.LogWarning("Overlay shape " + currentOverlayShape + " is not supported on current platform");
}
#endif
for (int i = 0; i < 2; ++i)
{
if (i >= textures.Length)
continue;
if (textures[i] != cachedTextures[i])
{
cachedTextures[i] = textures[i];
if (cachedTextures[i] != null)
texNativePtrs[i] = cachedTextures[i].GetNativeTexturePtr();
}
if (currentOverlayShape == OverlayShape.Cubemap)
{
if (textures[i] != null && textures[i].GetType() != typeof(Cubemap))
{
Debug.LogError("Need Cubemap texture for cube map overlay");
return;
}
}
}
if (cachedTextures[0] == null || texNativePtrs[0] == IntPtr.Zero)
return;
bool overlay = (currentOverlayType == OverlayType.Overlay);
bool headLocked = false;
for (var t = transform; t != null && !headLocked; t = t.parent)
headLocked |= (t == Camera.current.transform);
OVRPose pose = (headLocked) ? transform.ToHeadSpacePose() : transform.ToTrackingSpacePose();
Vector3 scale = transform.lossyScale;
for (int i = 0; i < 3; ++i)
scale[i] /= Camera.current.transform.lossyScale[i];
#if !UNITY_ANDROID
if (currentOverlayShape == OverlayShape.Cubemap)
{
pose.position = Camera.current.transform.position;
}
#endif
// Cylinder overlay sanity checking
if (currentOverlayShape == OverlayShape.Cylinder)
{
float arcAngle = scale.x / scale.z / (float)Math.PI * 180.0f;
if (arcAngle > 180.0f)
{
Debug.LogError("Cylinder overlay's arc angle has to be below 180 degree, current arc angle is " + arcAngle + " degree." );
return ;
}
}
bool isOverlayVisible = OVRPlugin.SetOverlayQuad(overlay, headLocked, texNativePtrs[0], texNativePtrs[1], IntPtr.Zero, pose.flipZ().ToPosef(), scale.ToVector3f(), layerIndex, (OVRPlugin.OverlayShape)currentOverlayShape);
if (rend)
rend.enabled = !isOverlayVisible;
}
}
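// Illustrative sketch (not part of the original source): encoding the cylinder overlay
// parameters in the transform scale as described in the class comment. The values are
// placeholders; the arc-angle constraint scale.x / scale.z <= PI must hold, otherwise
// OnRenderObject() rejects the overlay.
//
//   var overlay = quadObject.AddComponent<OVROverlay>();
//   overlay.currentOverlayType = OVROverlay.OverlayType.Overlay;
//   overlay.currentOverlayShape = OVROverlay.OverlayShape.Cylinder;
//   // arc length 2 m (x), height 1.5 m (y), radius 1 m (z)  ->  arc angle ~114.6 degrees
//   quadObject.transform.localScale = new Vector3(2.0f, 1.5f, 1.0f);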
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif
using System.Runtime.Serialization;
using System.Text;
using System.Xml;
#if !NETFX_CORE
using NUnit.Framework;
#else
using Microsoft.VisualStudio.TestPlatform.UnitTestFramework;
using TestFixture = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestClassAttribute;
using Test = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestMethodAttribute;
#endif
using Newtonsoft.Json;
using System.IO;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Utilities;
namespace Newtonsoft.Json.Tests
{
[TestFixture]
public class JsonTextWriterTest : TestFixtureBase
{
[Test]
public void QuoteNameAndStrings()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
JsonTextWriter writer = new JsonTextWriter(sw) { QuoteName = false };
writer.WriteStartObject();
writer.WritePropertyName("name");
writer.WriteValue("value");
writer.WriteEndObject();
writer.Flush();
Assert.AreEqual(@"{name:""value""}", sb.ToString());
}
[Test]
public void CloseOutput()
{
MemoryStream ms = new MemoryStream();
JsonTextWriter writer = new JsonTextWriter(new StreamWriter(ms));
Assert.IsTrue(ms.CanRead);
writer.Close();
Assert.IsFalse(ms.CanRead);
ms = new MemoryStream();
writer = new JsonTextWriter(new StreamWriter(ms)) { CloseOutput = false };
Assert.IsTrue(ms.CanRead);
writer.Close();
Assert.IsTrue(ms.CanRead);
}
#if !(PORTABLE || NETFX_CORE)
[Test]
public void WriteIConvertable()
{
var sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw);
writer.WriteValue(new ConvertibleInt(1));
Assert.AreEqual("1", sw.ToString());
}
#endif
[Test]
public void ValueFormatting()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.WriteStartArray();
jsonWriter.WriteValue('@');
jsonWriter.WriteValue("\r\n\t\f\b?{\\r\\n\"\'");
jsonWriter.WriteValue(true);
jsonWriter.WriteValue(10);
jsonWriter.WriteValue(10.99);
jsonWriter.WriteValue(0.99);
jsonWriter.WriteValue(0.000000000000000001d);
jsonWriter.WriteValue(0.000000000000000001m);
jsonWriter.WriteValue((string)null);
jsonWriter.WriteValue((object)null);
jsonWriter.WriteValue("This is a string.");
jsonWriter.WriteNull();
jsonWriter.WriteUndefined();
jsonWriter.WriteEndArray();
}
string expected = @"[""@"",""\r\n\t\f\b?{\\r\\n\""'"",true,10,10.99,0.99,1E-18,0.000000000000000001,null,null,""This is a string."",null,undefined]";
string result = sb.ToString();
Console.WriteLine("ValueFormatting");
Console.WriteLine(result);
Assert.AreEqual(expected, result);
}
[Test]
public void NullableValueFormatting()
{
StringWriter sw = new StringWriter();
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.WriteStartArray();
jsonWriter.WriteValue((char?)null);
jsonWriter.WriteValue((char?)'c');
jsonWriter.WriteValue((bool?)null);
jsonWriter.WriteValue((bool?)true);
jsonWriter.WriteValue((byte?)null);
jsonWriter.WriteValue((byte?)1);
jsonWriter.WriteValue((sbyte?)null);
jsonWriter.WriteValue((sbyte?)1);
jsonWriter.WriteValue((short?)null);
jsonWriter.WriteValue((short?)1);
jsonWriter.WriteValue((ushort?)null);
jsonWriter.WriteValue((ushort?)1);
jsonWriter.WriteValue((int?)null);
jsonWriter.WriteValue((int?)1);
jsonWriter.WriteValue((uint?)null);
jsonWriter.WriteValue((uint?)1);
jsonWriter.WriteValue((long?)null);
jsonWriter.WriteValue((long?)1);
jsonWriter.WriteValue((ulong?)null);
jsonWriter.WriteValue((ulong?)1);
jsonWriter.WriteValue((double?)null);
jsonWriter.WriteValue((double?)1.1);
jsonWriter.WriteValue((float?)null);
jsonWriter.WriteValue((float?)1.1);
jsonWriter.WriteValue((decimal?)null);
jsonWriter.WriteValue((decimal?)1.1m);
jsonWriter.WriteValue((DateTime?)null);
jsonWriter.WriteValue((DateTime?)new DateTime(DateTimeUtils.InitialJavaScriptDateTicks, DateTimeKind.Utc));
#if !NET20
jsonWriter.WriteValue((DateTimeOffset?)null);
jsonWriter.WriteValue((DateTimeOffset?)new DateTimeOffset(DateTimeUtils.InitialJavaScriptDateTicks, TimeSpan.Zero));
#endif
jsonWriter.WriteEndArray();
}
string json = sw.ToString();
string expected;
#if !NET20
expected = @"[null,""c"",null,true,null,1,null,1,null,1,null,1,null,1,null,1,null,1,null,1,null,1.1,null,1.1,null,1.1,null,""1970-01-01T00:00:00Z"",null,""1970-01-01T00:00:00+00:00""]";
#else
expected = @"[null,""c"",null,true,null,1,null,1,null,1,null,1,null,1,null,1,null,1,null,1,null,1.1,null,1.1,null,1.1,null,""1970-01-01T00:00:00Z""]";
#endif
Assert.AreEqual(expected, json);
}
[Test]
public void WriteValueObjectWithNullable()
{
StringWriter sw = new StringWriter();
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
char? value = 'c';
jsonWriter.WriteStartArray();
jsonWriter.WriteValue((object)value);
jsonWriter.WriteEndArray();
}
string json = sw.ToString();
string expected = @"[""c""]";
Assert.AreEqual(expected, json);
}
[Test]
public void WriteValueObjectWithUnsupportedValue()
{
ExceptionAssert.Throws<JsonWriterException>(
@"Unsupported type: System.Version. Use the JsonSerializer class to get the object's JSON representation. Path ''.",
() =>
{
StringWriter sw = new StringWriter();
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(new Version(1, 1, 1, 1));
jsonWriter.WriteEndArray();
}
});
}
[Test]
public void StringEscaping()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(@"""These pretzels are making me thirsty!""");
jsonWriter.WriteValue("Jeff's house was burninated.");
jsonWriter.WriteValue(@"1. You don't talk about fight club.
2. You don't talk about fight club.");
jsonWriter.WriteValue("35% of\t statistics\n are made\r up.");
jsonWriter.WriteEndArray();
}
string expected = @"[""\""These pretzels are making me thirsty!\"""",""Jeff's house was burninated."",""1. You don't talk about fight club.\r\n2. You don't talk about fight club."",""35% of\t statistics\n are made\r up.""]";
string result = sb.ToString();
Console.WriteLine("StringEscaping");
Console.WriteLine(result);
Assert.AreEqual(expected, result);
}
[Test]
public void WriteEnd()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.WriteStartObject();
jsonWriter.WritePropertyName("CPU");
jsonWriter.WriteValue("Intel");
jsonWriter.WritePropertyName("PSU");
jsonWriter.WriteValue("500W");
jsonWriter.WritePropertyName("Drives");
jsonWriter.WriteStartArray();
jsonWriter.WriteValue("DVD read/writer");
jsonWriter.WriteComment("(broken)");
jsonWriter.WriteValue("500 gigabyte hard drive");
jsonWriter.WriteValue("200 gigabype hard drive");
jsonWriter.WriteEndObject();
Assert.AreEqual(WriteState.Start, jsonWriter.WriteState);
}
string expected = @"{
""CPU"": ""Intel"",
""PSU"": ""500W"",
""Drives"": [
""DVD read/writer""
/*(broken)*/,
""500 gigabyte hard drive"",
""200 gigabype hard drive""
]
}";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void CloseWithRemainingContent()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.WriteStartObject();
jsonWriter.WritePropertyName("CPU");
jsonWriter.WriteValue("Intel");
jsonWriter.WritePropertyName("PSU");
jsonWriter.WriteValue("500W");
jsonWriter.WritePropertyName("Drives");
jsonWriter.WriteStartArray();
jsonWriter.WriteValue("DVD read/writer");
jsonWriter.WriteComment("(broken)");
jsonWriter.WriteValue("500 gigabyte hard drive");
jsonWriter.WriteValue("200 gigabype hard drive");
jsonWriter.Close();
}
string expected = @"{
""CPU"": ""Intel"",
""PSU"": ""500W"",
""Drives"": [
""DVD read/writer""
/*(broken)*/,
""500 gigabyte hard drive"",
""200 gigabype hard drive""
]
}";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void Indenting()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.WriteStartObject();
jsonWriter.WritePropertyName("CPU");
jsonWriter.WriteValue("Intel");
jsonWriter.WritePropertyName("PSU");
jsonWriter.WriteValue("500W");
jsonWriter.WritePropertyName("Drives");
jsonWriter.WriteStartArray();
jsonWriter.WriteValue("DVD read/writer");
jsonWriter.WriteComment("(broken)");
jsonWriter.WriteValue("500 gigabyte hard drive");
jsonWriter.WriteValue("200 gigabype hard drive");
jsonWriter.WriteEnd();
jsonWriter.WriteEndObject();
Assert.AreEqual(WriteState.Start, jsonWriter.WriteState);
}
// {
// "CPU": "Intel",
// "PSU": "500W",
// "Drives": [
// "DVD read/writer"
// /*(broken)*/,
// "500 gigabyte hard drive",
// "200 gigabype hard drive"
// ]
// }
string expected = @"{
""CPU"": ""Intel"",
""PSU"": ""500W"",
""Drives"": [
""DVD read/writer""
/*(broken)*/,
""500 gigabyte hard drive"",
""200 gigabype hard drive""
]
}";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void State()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
Assert.AreEqual(WriteState.Start, jsonWriter.WriteState);
jsonWriter.WriteStartObject();
Assert.AreEqual(WriteState.Object, jsonWriter.WriteState);
Assert.AreEqual("", jsonWriter.Path);
jsonWriter.WritePropertyName("CPU");
Assert.AreEqual(WriteState.Property, jsonWriter.WriteState);
Assert.AreEqual("CPU", jsonWriter.Path);
jsonWriter.WriteValue("Intel");
Assert.AreEqual(WriteState.Object, jsonWriter.WriteState);
Assert.AreEqual("CPU", jsonWriter.Path);
jsonWriter.WritePropertyName("Drives");
Assert.AreEqual(WriteState.Property, jsonWriter.WriteState);
Assert.AreEqual("Drives", jsonWriter.Path);
jsonWriter.WriteStartArray();
Assert.AreEqual(WriteState.Array, jsonWriter.WriteState);
jsonWriter.WriteValue("DVD read/writer");
Assert.AreEqual(WriteState.Array, jsonWriter.WriteState);
Assert.AreEqual("Drives[0]", jsonWriter.Path);
jsonWriter.WriteEnd();
Assert.AreEqual(WriteState.Object, jsonWriter.WriteState);
Assert.AreEqual("Drives", jsonWriter.Path);
jsonWriter.WriteEndObject();
Assert.AreEqual(WriteState.Start, jsonWriter.WriteState);
Assert.AreEqual("", jsonWriter.Path);
}
}
[Test]
public void FloatingPointNonFiniteNumbers_Symbol()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.FloatFormatHandling = FloatFormatHandling.Symbol;
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(double.NaN);
jsonWriter.WriteValue(double.PositiveInfinity);
jsonWriter.WriteValue(double.NegativeInfinity);
jsonWriter.WriteValue(float.NaN);
jsonWriter.WriteValue(float.PositiveInfinity);
jsonWriter.WriteValue(float.NegativeInfinity);
jsonWriter.WriteEndArray();
jsonWriter.Flush();
}
string expected = @"[
NaN,
Infinity,
-Infinity,
NaN,
Infinity,
-Infinity
]";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void FloatingPointNonFiniteNumbers_Zero()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.FloatFormatHandling = FloatFormatHandling.DefaultValue;
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(double.NaN);
jsonWriter.WriteValue(double.PositiveInfinity);
jsonWriter.WriteValue(double.NegativeInfinity);
jsonWriter.WriteValue(float.NaN);
jsonWriter.WriteValue(float.PositiveInfinity);
jsonWriter.WriteValue(float.NegativeInfinity);
jsonWriter.WriteValue((double?)double.NaN);
jsonWriter.WriteValue((double?)double.PositiveInfinity);
jsonWriter.WriteValue((double?)double.NegativeInfinity);
jsonWriter.WriteValue((float?)float.NaN);
jsonWriter.WriteValue((float?)float.PositiveInfinity);
jsonWriter.WriteValue((float?)float.NegativeInfinity);
jsonWriter.WriteEndArray();
jsonWriter.Flush();
}
string expected = @"[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
null,
null,
null,
null,
null,
null
]";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void FloatingPointNonFiniteNumbers_String()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.FloatFormatHandling = FloatFormatHandling.String;
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(double.NaN);
jsonWriter.WriteValue(double.PositiveInfinity);
jsonWriter.WriteValue(double.NegativeInfinity);
jsonWriter.WriteValue(float.NaN);
jsonWriter.WriteValue(float.PositiveInfinity);
jsonWriter.WriteValue(float.NegativeInfinity);
jsonWriter.WriteEndArray();
jsonWriter.Flush();
}
string expected = @"[
""NaN"",
""Infinity"",
""-Infinity"",
""NaN"",
""Infinity"",
""-Infinity""
]";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void FloatingPointNonFiniteNumbers_QuoteChar()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.FloatFormatHandling = FloatFormatHandling.String;
jsonWriter.QuoteChar = '\'';
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(double.NaN);
jsonWriter.WriteValue(double.PositiveInfinity);
jsonWriter.WriteValue(double.NegativeInfinity);
jsonWriter.WriteValue(float.NaN);
jsonWriter.WriteValue(float.PositiveInfinity);
jsonWriter.WriteValue(float.NegativeInfinity);
jsonWriter.WriteEndArray();
jsonWriter.Flush();
}
string expected = @"[
'NaN',
'Infinity',
'-Infinity',
'NaN',
'Infinity',
'-Infinity'
]";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void WriteRawInStart()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.FloatFormatHandling = FloatFormatHandling.Symbol;
jsonWriter.WriteRaw("[1,2,3,4,5]");
jsonWriter.WriteWhitespace(" ");
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(double.NaN);
jsonWriter.WriteEndArray();
}
string expected = @"[1,2,3,4,5] [
NaN
]";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void WriteRawInArray()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.FloatFormatHandling = FloatFormatHandling.Symbol;
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(double.NaN);
jsonWriter.WriteRaw(",[1,2,3,4,5]");
jsonWriter.WriteRaw(",[1,2,3,4,5]");
jsonWriter.WriteValue(float.NaN);
jsonWriter.WriteEndArray();
}
string expected = @"[
NaN,[1,2,3,4,5],[1,2,3,4,5],
NaN
]";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void WriteRawInObject()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.WriteStartObject();
jsonWriter.WriteRaw(@"""PropertyName"":[1,2,3,4,5]");
jsonWriter.WriteEnd();
}
string expected = @"{""PropertyName"":[1,2,3,4,5]}";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void WriteToken()
{
JsonTextReader reader = new JsonTextReader(new StringReader("[1,2,3,4,5]"));
reader.Read();
reader.Read();
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw);
writer.WriteToken(reader);
Assert.AreEqual("1", sw.ToString());
}
[Test]
public void WriteRawValue()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
int i = 0;
string rawJson = "[1,2]";
jsonWriter.WriteStartObject();
while (i < 3)
{
jsonWriter.WritePropertyName("d" + i);
jsonWriter.WriteRawValue(rawJson);
i++;
}
jsonWriter.WriteEndObject();
}
Assert.AreEqual(@"{""d0"":[1,2],""d1"":[1,2],""d2"":[1,2]}", sb.ToString());
}
[Test]
public void WriteObjectNestedInConstructor()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.WriteStartObject();
jsonWriter.WritePropertyName("con");
jsonWriter.WriteStartConstructor("Ext.data.JsonStore");
jsonWriter.WriteStartObject();
jsonWriter.WritePropertyName("aa");
jsonWriter.WriteValue("aa");
jsonWriter.WriteEndObject();
jsonWriter.WriteEndConstructor();
jsonWriter.WriteEndObject();
}
Assert.AreEqual(@"{""con"":new Ext.data.JsonStore({""aa"":""aa""})}", sb.ToString());
}
[Test]
public void WriteFloatingPointNumber()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.FloatFormatHandling = FloatFormatHandling.Symbol;
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(0.0);
jsonWriter.WriteValue(0f);
jsonWriter.WriteValue(0.1);
jsonWriter.WriteValue(1.0);
jsonWriter.WriteValue(1.000001);
jsonWriter.WriteValue(0.000001);
jsonWriter.WriteValue(double.Epsilon);
jsonWriter.WriteValue(double.PositiveInfinity);
jsonWriter.WriteValue(double.NegativeInfinity);
jsonWriter.WriteValue(double.NaN);
jsonWriter.WriteValue(double.MaxValue);
jsonWriter.WriteValue(double.MinValue);
jsonWriter.WriteValue(float.PositiveInfinity);
jsonWriter.WriteValue(float.NegativeInfinity);
jsonWriter.WriteValue(float.NaN);
jsonWriter.WriteEndArray();
}
Assert.AreEqual(@"[0.0,0.0,0.1,1.0,1.000001,1E-06,4.94065645841247E-324,Infinity,-Infinity,NaN,1.7976931348623157E+308,-1.7976931348623157E+308,Infinity,-Infinity,NaN]", sb.ToString());
}
[Test]
public void WriteIntegerNumber()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw) { Formatting = Formatting.Indented })
{
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(int.MaxValue);
jsonWriter.WriteValue(int.MinValue);
jsonWriter.WriteValue(0);
jsonWriter.WriteValue(-0);
jsonWriter.WriteValue(9L);
jsonWriter.WriteValue(9UL);
jsonWriter.WriteValue(long.MaxValue);
jsonWriter.WriteValue(long.MinValue);
jsonWriter.WriteValue(ulong.MaxValue);
jsonWriter.WriteValue(ulong.MinValue);
jsonWriter.WriteEndArray();
}
Console.WriteLine(sb.ToString());
Assert.AreEqual(@"[
2147483647,
-2147483648,
0,
0,
9,
9,
9223372036854775807,
-9223372036854775808,
18446744073709551615,
0
]", sb.ToString());
}
[Test]
public void BadWriteEndArray()
{
ExceptionAssert.Throws<JsonWriterException>(
"No token to close. Path ''.",
() =>
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(0.0);
jsonWriter.WriteEndArray();
jsonWriter.WriteEndArray();
}
});
}
[Test]
public void InvalidQuoteChar()
{
ExceptionAssert.Throws<ArgumentException>(
@"Invalid JavaScript string quote character. Valid quote characters are ' and "".",
() =>
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.QuoteChar = '*';
}
});
}
[Test]
public void Indentation()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.FloatFormatHandling = FloatFormatHandling.Symbol;
Assert.AreEqual(Formatting.Indented, jsonWriter.Formatting);
jsonWriter.Indentation = 5;
Assert.AreEqual(5, jsonWriter.Indentation);
jsonWriter.IndentChar = '_';
Assert.AreEqual('_', jsonWriter.IndentChar);
jsonWriter.QuoteName = true;
Assert.AreEqual(true, jsonWriter.QuoteName);
jsonWriter.QuoteChar = '\'';
Assert.AreEqual('\'', jsonWriter.QuoteChar);
jsonWriter.WriteStartObject();
jsonWriter.WritePropertyName("propertyName");
jsonWriter.WriteValue(double.NaN);
jsonWriter.WriteEndObject();
}
string expected = @"{
_____'propertyName': NaN
}";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void WriteSingleBytes()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
string text = "Hello world.";
byte[] data = Encoding.UTF8.GetBytes(text);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
Assert.AreEqual(Formatting.Indented, jsonWriter.Formatting);
jsonWriter.WriteValue(data);
}
string expected = @"""SGVsbG8gd29ybGQu""";
string result = sb.ToString();
Assert.AreEqual(expected, result);
byte[] d2 = Convert.FromBase64String(result.Trim('"'));
Assert.AreEqual(text, Encoding.UTF8.GetString(d2, 0, d2.Length));
}
[Test]
public void WriteBytesInArray()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
string text = "Hello world.";
byte[] data = Encoding.UTF8.GetBytes(text);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
Assert.AreEqual(Formatting.Indented, jsonWriter.Formatting);
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(data);
jsonWriter.WriteValue(data);
jsonWriter.WriteValue((object)data);
jsonWriter.WriteValue((byte[])null);
jsonWriter.WriteValue((Uri)null);
jsonWriter.WriteEndArray();
}
string expected = @"[
""SGVsbG8gd29ybGQu"",
""SGVsbG8gd29ybGQu"",
""SGVsbG8gd29ybGQu"",
null,
null
]";
string result = sb.ToString();
Assert.AreEqual(expected, result);
}
[Test]
public void Path()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
string text = "Hello world.";
byte[] data = Encoding.UTF8.GetBytes(text);
using (JsonTextWriter writer = new JsonTextWriter(sw))
{
writer.Formatting = Formatting.Indented;
writer.WriteStartArray();
Assert.AreEqual("", writer.Path);
writer.WriteStartObject();
Assert.AreEqual("[0]", writer.Path);
writer.WritePropertyName("Property1");
Assert.AreEqual("[0].Property1", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[0].Property1", writer.Path);
writer.WriteValue(1);
Assert.AreEqual("[0].Property1[0]", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[0].Property1[1]", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[0].Property1[1][0]", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[0].Property1[1][0][0]", writer.Path);
writer.WriteEndObject();
Assert.AreEqual("[0]", writer.Path);
writer.WriteStartObject();
Assert.AreEqual("[1]", writer.Path);
writer.WritePropertyName("Property2");
Assert.AreEqual("[1].Property2", writer.Path);
writer.WriteStartConstructor("Constructor1");
Assert.AreEqual("[1].Property2", writer.Path);
writer.WriteNull();
Assert.AreEqual("[1].Property2[0]", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[1].Property2[1]", writer.Path);
writer.WriteValue(1);
Assert.AreEqual("[1].Property2[1][0]", writer.Path);
writer.WriteEnd();
Assert.AreEqual("[1].Property2[1]", writer.Path);
writer.WriteEndObject();
Assert.AreEqual("[1]", writer.Path);
writer.WriteEndArray();
Assert.AreEqual("", writer.Path);
}
Assert.AreEqual(@"[
{
""Property1"": [
1,
[
[
[]
]
]
]
},
{
""Property2"": new Constructor1(
null,
[
1
]
)
}
]", sb.ToString());
}
[Test]
public void BuildStateArray()
{
JsonWriter.State[][] stateArray = JsonWriter.BuildStateArray();
var valueStates = JsonWriter.StateArrayTempate[7];
foreach (JsonToken valueToken in EnumUtils.GetValues(typeof(JsonToken)))
{
switch (valueToken)
{
case JsonToken.Integer:
case JsonToken.Float:
case JsonToken.String:
case JsonToken.Boolean:
case JsonToken.Null:
case JsonToken.Undefined:
case JsonToken.Date:
case JsonToken.Bytes:
Assert.AreEqual(valueStates, stateArray[(int)valueToken], "Error for " + valueToken + " states.");
break;
}
}
}
[Test]
public void DateTimeZoneHandling()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw)
{
DateTimeZoneHandling = Json.DateTimeZoneHandling.Utc
};
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Unspecified));
Assert.AreEqual(@"""2000-01-01T01:01:01Z""", sw.ToString());
}
[Test]
public void HtmlStringEscapeHandling()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw)
{
StringEscapeHandling = StringEscapeHandling.EscapeHtml
};
string script = @"<script type=""text/javascript"">alert('hi');</script>";
writer.WriteValue(script);
string json = sw.ToString();
Assert.AreEqual(@"""\u003cscript type=\u0022text/javascript\u0022\u003ealert(\u0027hi\u0027);\u003c/script\u003e""", json);
JsonTextReader reader = new JsonTextReader(new StringReader(json));
Assert.AreEqual(script, reader.ReadAsString());
//Console.WriteLine(HttpUtility.HtmlEncode(script));
//System.Web.Script.Serialization.JavaScriptSerializer s = new System.Web.Script.Serialization.JavaScriptSerializer();
//Console.WriteLine(s.Serialize(new { html = script }));
}
[Test]
public void NonAsciiStringEscapeHandling()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw)
{
StringEscapeHandling = StringEscapeHandling.EscapeNonAscii
};
string unicode = "\u5f20";
writer.WriteValue(unicode);
string json = sw.ToString();
Assert.AreEqual(8, json.Length);
Assert.AreEqual(@"""\u5f20""", json);
JsonTextReader reader = new JsonTextReader(new StringReader(json));
Assert.AreEqual(unicode, reader.ReadAsString());
sw = new StringWriter();
writer = new JsonTextWriter(sw)
{
StringEscapeHandling = StringEscapeHandling.Default
};
writer.WriteValue(unicode);
json = sw.ToString();
Assert.AreEqual(3, json.Length);
Assert.AreEqual("\"\u5f20\"", json);
}
[Test]
public void WriteEndOnProperty()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw);
writer.QuoteChar = '\'';
writer.WriteStartObject();
writer.WritePropertyName("Blah");
writer.WriteEnd();
Assert.AreEqual("{'Blah':null}", sw.ToString());
}
#if !NET20
[Test]
public void QuoteChar()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw);
writer.Formatting = Formatting.Indented;
writer.QuoteChar = '\'';
writer.WriteStartArray();
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
writer.DateFormatHandling = DateFormatHandling.MicrosoftDateFormat;
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
writer.DateFormatString = "yyyy gg";
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
writer.WriteValue(new byte[] { 1, 2, 3 });
writer.WriteValue(TimeSpan.Zero);
writer.WriteValue(new Uri("http://www.google.com/"));
writer.WriteValue(Guid.Empty);
writer.WriteEnd();
Assert.AreEqual(@"[
'2000-01-01T01:01:01Z',
'2000-01-01T01:01:01+00:00',
'\/Date(946688461000)\/',
'\/Date(946688461000+0000)\/',
'2000 A.D.',
'2000 A.D.',
'AQID',
'00:00:00',
'http://www.google.com/',
'00000000-0000-0000-0000-000000000000'
]", sw.ToString());
}
[Test]
public void Culture()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw);
writer.Formatting = Formatting.Indented;
writer.DateFormatString = "yyyy tt";
writer.Culture = new CultureInfo("en-NZ");
writer.QuoteChar = '\'';
writer.WriteStartArray();
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
writer.WriteEnd();
Assert.AreEqual(@"[
'2000 a.m.',
'2000 a.m.'
]", sw.ToString());
}
#endif
[Test]
public void CompareNewStringEscapingWithOld()
{
Console.WriteLine("Started");
char c = (char)0;
do
{
if (c % 1000 == 0)
Console.WriteLine("Position: " + (int)c);
StringWriter swNew = new StringWriter();
char[] buffer = null;
JavaScriptUtils.WriteEscapedJavaScriptString(swNew, c.ToString(), '"', true, JavaScriptUtils.DoubleQuoteCharEscapeFlags, StringEscapeHandling.Default, ref buffer);
StringWriter swOld = new StringWriter();
WriteEscapedJavaScriptStringOld(swOld, c.ToString(), '"', true);
string newText = swNew.ToString();
string oldText = swOld.ToString();
if (newText != oldText)
throw new Exception("Difference for char '{0}' (value {1}). Old text: {2}, New text: {3}".FormatWith(CultureInfo.InvariantCulture, c, (int)c, oldText, newText));
c++;
} while (c != char.MaxValue);
Console.WriteLine("Finished");
}
private const string EscapedUnicodeText = "!";
private static void WriteEscapedJavaScriptStringOld(TextWriter writer, string s, char delimiter, bool appendDelimiters)
{
// leading delimiter
if (appendDelimiters)
writer.Write(delimiter);
if (s != null)
{
char[] chars = null;
char[] unicodeBuffer = null;
int lastWritePosition = 0;
for (int i = 0; i < s.Length; i++)
{
var c = s[i];
// don't escape standard text/numbers except '\' and the text delimiter
if (c >= ' ' && c < 128 && c != '\\' && c != delimiter)
continue;
string escapedValue;
switch (c)
{
case '\t':
escapedValue = @"\t";
break;
case '\n':
escapedValue = @"\n";
break;
case '\r':
escapedValue = @"\r";
break;
case '\f':
escapedValue = @"\f";
break;
case '\b':
escapedValue = @"\b";
break;
case '\\':
escapedValue = @"\\";
break;
case '\u0085': // Next Line
escapedValue = @"\u0085";
break;
case '\u2028': // Line Separator
escapedValue = @"\u2028";
break;
case '\u2029': // Paragraph Separator
escapedValue = @"\u2029";
break;
case '\'':
// this character is being used as the delimiter
escapedValue = @"\'";
break;
case '"':
// this character is being used as the delimiter
escapedValue = "\\\"";
break;
default:
if (c <= '\u001f')
{
if (unicodeBuffer == null)
unicodeBuffer = new char[6];
StringUtils.ToCharAsUnicode(c, unicodeBuffer);
// slightly hacky but it saves multiple conditions in if test
escapedValue = EscapedUnicodeText;
}
else
{
escapedValue = null;
}
break;
}
if (escapedValue == null)
continue;
if (i > lastWritePosition)
{
if (chars == null)
chars = s.ToCharArray();
// write unchanged chars before writing escaped text
writer.Write(chars, lastWritePosition, i - lastWritePosition);
}
lastWritePosition = i + 1;
if (!string.Equals(escapedValue, EscapedUnicodeText))
writer.Write(escapedValue);
else
writer.Write(unicodeBuffer);
}
if (lastWritePosition == 0)
{
// no escaped text, write entire string
writer.Write(s);
}
else
{
if (chars == null)
chars = s.ToCharArray();
// write remaining text
writer.Write(chars, lastWritePosition, s.Length - lastWritePosition);
}
}
// trailing delimiter
if (appendDelimiters)
writer.Write(delimiter);
}
[Test]
public void CustomJsonTextWriterTests()
{
StringWriter sw = new StringWriter();
CustomJsonTextWriter writer = new CustomJsonTextWriter(sw) { Formatting = Formatting.Indented };
writer.WriteStartObject();
Assert.AreEqual(WriteState.Object, writer.WriteState);
writer.WritePropertyName("Property1");
Assert.AreEqual(WriteState.Property, writer.WriteState);
Assert.AreEqual("Property1", writer.Path);
writer.WriteNull();
Assert.AreEqual(WriteState.Object, writer.WriteState);
writer.WriteEndObject();
Assert.AreEqual(WriteState.Start, writer.WriteState);
Assert.AreEqual(@"{{{
""1ytreporP"": NULL!!!
}}}", sw.ToString());
}
[Test]
public void QuoteDictionaryNames()
{
var d = new Dictionary<string, int>
{
{ "a", 1 },
};
var jsonSerializerSettings = new JsonSerializerSettings
{
Formatting = Formatting.Indented,
};
var serializer = JsonSerializer.Create(jsonSerializerSettings);
using (var stringWriter = new StringWriter())
{
using (var writer = new JsonTextWriter(stringWriter) { QuoteName = false })
{
serializer.Serialize(writer, d);
writer.Close();
}
Assert.AreEqual(@"{
a: 1
}", stringWriter.ToString());
}
}
[Test]
public void WriteComments()
{
string json = @"//comment*//*hi*/
{//comment
Name://comment
true//comment after true" + StringUtils.CarriageReturn + @"
,//comment after comma" + StringUtils.CarriageReturnLineFeed + @"
""ExpiryDate""://comment" + StringUtils.LineFeed + @"
new
" + StringUtils.LineFeed +
@"Constructor
(//comment
null//comment
),
""Price"": 3.99,
""Sizes"": //comment
[//comment
""Small""//comment
]//comment
}//comment
//comment 1 ";
JsonTextReader r = new JsonTextReader(new StringReader(json));
StringWriter sw = new StringWriter();
JsonTextWriter w = new JsonTextWriter(sw);
w.Formatting = Formatting.Indented;
w.WriteToken(r, true);
Assert.AreEqual(@"/*comment*//*hi*/*/{/*comment*/
""Name"": /*comment*/ true/*comment after true*//*comment after comma*/,
""ExpiryDate"": /*comment*/ new Constructor(
/*comment*/,
null
/*comment*/
),
""Price"": 3.99,
""Sizes"": /*comment*/ [
/*comment*/
""Small""
/*comment*/
]/*comment*/
}/*comment *//*comment 1 */", sw.ToString());
}
}
public class CustomJsonTextWriter : JsonTextWriter
{
private readonly TextWriter _writer;
public CustomJsonTextWriter(TextWriter textWriter) : base(textWriter)
{
_writer = textWriter;
}
public override void WritePropertyName(string name)
{
WritePropertyName(name, true);
}
public override void WritePropertyName(string name, bool escape)
{
SetWriteState(JsonToken.PropertyName, name);
if (QuoteName)
_writer.Write(QuoteChar);
_writer.Write(new string(name.ToCharArray().Reverse().ToArray()));
if (QuoteName)
_writer.Write(QuoteChar);
_writer.Write(':');
}
public override void WriteNull()
{
SetWriteState(JsonToken.Null, null);
_writer.Write("NULL!!!");
}
public override void WriteStartObject()
{
SetWriteState(JsonToken.StartObject, null);
_writer.Write("{{{");
}
public override void WriteEndObject()
{
SetWriteState(JsonToken.EndObject, null);
}
protected override void WriteEnd(JsonToken token)
{
if (token == JsonToken.EndObject)
_writer.Write("}}}");
else
base.WriteEnd(token);
}
}
#if !(PORTABLE || NETFX_CORE)
public struct ConvertibleInt : IConvertible
{
private readonly int _value;
public ConvertibleInt(int value)
{
_value = value;
}
public TypeCode GetTypeCode()
{
return TypeCode.Int32;
}
public bool ToBoolean(IFormatProvider provider)
{
throw new NotImplementedException();
}
public byte ToByte(IFormatProvider provider)
{
throw new NotImplementedException();
}
public char ToChar(IFormatProvider provider)
{
throw new NotImplementedException();
}
public DateTime ToDateTime(IFormatProvider provider)
{
throw new NotImplementedException();
}
public decimal ToDecimal(IFormatProvider provider)
{
throw new NotImplementedException();
}
public double ToDouble(IFormatProvider provider)
{
throw new NotImplementedException();
}
public short ToInt16(IFormatProvider provider)
{
throw new NotImplementedException();
}
public int ToInt32(IFormatProvider provider)
{
throw new NotImplementedException();
}
public long ToInt64(IFormatProvider provider)
{
throw new NotImplementedException();
}
public sbyte ToSByte(IFormatProvider provider)
{
throw new NotImplementedException();
}
public float ToSingle(IFormatProvider provider)
{
throw new NotImplementedException();
}
public string ToString(IFormatProvider provider)
{
throw new NotImplementedException();
}
public object ToType(Type conversionType, IFormatProvider provider)
{
if (conversionType == typeof(int))
return _value;
throw new Exception("Type not supported: " + conversionType.FullName);
}
public ushort ToUInt16(IFormatProvider provider)
{
throw new NotImplementedException();
}
public uint ToUInt32(IFormatProvider provider)
{
throw new NotImplementedException();
}
public ulong ToUInt64(IFormatProvider provider)
{
throw new NotImplementedException();
}
}
#endif
}
| |
//------------------------------------------------------------------------------
// <copyright file="DataBindingCollection.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Web.UI {
using System;
using System.Collections;
using System.Collections.Specialized;
using System.ComponentModel;
using System.ComponentModel.Design;
using System.Data;
using System.Web.Util;
using System.Security.Permissions;
/// <devdoc>
/// </devdoc>
public sealed class DataBindingCollection : ICollection {
private EventHandler changedEvent;
private Hashtable bindings;
private Hashtable removedBindings;
/// <devdoc>
/// </devdoc>
public DataBindingCollection() {
this.bindings = new Hashtable(StringComparer.OrdinalIgnoreCase);
}
/// <devdoc>
/// </devdoc>
public int Count {
get {
return bindings.Count;
}
}
/// <devdoc>
/// </devdoc>
public bool IsReadOnly {
get {
return false;
}
}
/// <devdoc>
/// </devdoc>
public bool IsSynchronized {
get {
return false;
}
}
/// <devdoc>
/// </devdoc>
public string[] RemovedBindings {
get {
int bindingCount = 0;
ICollection keys = null;
if (removedBindings != null) {
keys = removedBindings.Keys;
bindingCount = keys.Count;
string[] removedNames = new string[bindingCount];
int i = 0;
foreach (string s in keys) {
removedNames[i++] = s;
}
removedBindings.Clear();
return removedNames;
}
else {
return new string[0];
}
}
}
/// <devdoc>
/// </devdoc>
private Hashtable RemovedBindingsTable {
get {
if (removedBindings == null) {
removedBindings = new Hashtable(StringComparer.OrdinalIgnoreCase);
}
return removedBindings;
}
}
/// <devdoc>
/// </devdoc>
public object SyncRoot {
get {
return this;
}
}
/// <devdoc>
/// </devdoc>
public DataBinding this[string propertyName] {
get {
object o = bindings[propertyName];
if (o != null)
return (DataBinding)o;
return null;
}
}
public event EventHandler Changed {
add {
changedEvent = (EventHandler)Delegate.Combine(changedEvent, value);
}
remove {
changedEvent = (EventHandler)Delegate.Remove(changedEvent, value);
}
}
/// <devdoc>
/// </devdoc>
public void Add(DataBinding binding) {
bindings[binding.PropertyName] = binding;
RemovedBindingsTable.Remove(binding.PropertyName);
OnChanged();
}
/// <devdoc>
/// </devdoc>
public bool Contains(string propertyName) {
return bindings.Contains(propertyName);
}
/// <devdoc>
/// </devdoc>
public void Clear() {
ICollection keys = bindings.Keys;
if ((keys.Count != 0) && (removedBindings == null)) {
// ensure the removedBindings hashtable is created
Hashtable h = RemovedBindingsTable;
}
foreach (string s in keys) {
removedBindings[s] = String.Empty;
}
bindings.Clear();
OnChanged();
}
/// <devdoc>
/// </devdoc>
public void CopyTo(Array array, int index) {
for (IEnumerator e = this.GetEnumerator(); e.MoveNext();)
array.SetValue(e.Current, index++);
}
/// <devdoc>
/// </devdoc>
public IEnumerator GetEnumerator() {
return bindings.Values.GetEnumerator();
}
private void OnChanged() {
if (changedEvent != null) {
changedEvent(this, EventArgs.Empty);
}
}
/// <devdoc>
/// </devdoc>
public void Remove(string propertyName) {
Remove(propertyName, true);
}
/// <devdoc>
/// </devdoc>
public void Remove(DataBinding binding) {
Remove(binding.PropertyName, true);
}
/// <devdoc>
/// </devdoc>
public void Remove(string propertyName, bool addToRemovedList) {
if (Contains(propertyName)) {
bindings.Remove(propertyName);
if (addToRemovedList) {
RemovedBindingsTable[propertyName] = String.Empty;
}
OnChanged();
}
}
}
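// Illustrative usage sketch, not part of the framework source: shows how the collection above
// tracks removed bindings. DataBinding's (propertyName, propertyType, expression) constructor
// is assumed to be the System.Web.UI one; the property name and expression below are hypothetical.
internal static class DataBindingCollectionUsageSketch {
internal static string[] AddThenRemove() {
DataBindingCollection bindings = new DataBindingCollection();
bindings.Add(new DataBinding("Text", typeof(string), "Container.DataItem"));
// Remove moves the property name into the removed-bindings table...
bindings.Remove("Text");
// ...and reading RemovedBindings returns { "Text" } and clears that table.
return bindings.RemovedBindings;
}
}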
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using Hyak.Common;
using Microsoft.Azure.Management.Internal.Resources.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.Azure.Management.Internal.Resources
{
public partial class ResourceManagementClient : ServiceClient<ResourceManagementClient>, IResourceManagementClient
{
private string _apiVersion;
/// <summary>
/// Gets the API version.
/// </summary>
public string ApiVersion
{
get { return this._apiVersion; }
}
private Uri _baseUri;
/// <summary>
/// Gets the URI used as the base for all cloud service requests.
/// </summary>
public Uri BaseUri
{
get { return this._baseUri; }
}
private SubscriptionCloudCredentials _credentials;
/// <summary>
/// Gets subscription credentials which uniquely identify Microsoft
/// Azure subscription. The subscription ID forms part of the URI for
/// every service call.
/// </summary>
public SubscriptionCloudCredentials Credentials
{
get { return this._credentials; }
}
private int _longRunningOperationInitialTimeout;
/// <summary>
/// Gets or sets the initial timeout for Long Running Operations.
/// </summary>
public int LongRunningOperationInitialTimeout
{
get { return this._longRunningOperationInitialTimeout; }
set { this._longRunningOperationInitialTimeout = value; }
}
private int _longRunningOperationRetryTimeout;
/// <summary>
/// Gets or sets the retry timeout for Long Running Operations.
/// </summary>
public int LongRunningOperationRetryTimeout
{
get { return this._longRunningOperationRetryTimeout; }
set { this._longRunningOperationRetryTimeout = value; }
}
private IDeploymentOperationOperations _deploymentOperations;
/// <summary>
/// Operations for managing deployment operations.
/// </summary>
public virtual IDeploymentOperationOperations DeploymentOperations
{
get { return this._deploymentOperations; }
}
private IDeploymentOperations _deployments;
/// <summary>
/// Operations for managing deployments.
/// </summary>
public virtual IDeploymentOperations Deployments
{
get { return this._deployments; }
}
private IProviderOperations _providers;
/// <summary>
/// Operations for managing providers.
/// </summary>
public virtual IProviderOperations Providers
{
get { return this._providers; }
}
private IProviderOperationsMetadataOperations _providerOperationsMetadata;
/// <summary>
/// Operations for getting provider operations metadata.
/// </summary>
public virtual IProviderOperationsMetadataOperations ProviderOperationsMetadata
{
get { return this._providerOperationsMetadata; }
}
private IResourceGroupOperations _resourceGroups;
/// <summary>
/// Operations for managing resource groups.
/// </summary>
public virtual IResourceGroupOperations ResourceGroups
{
get { return this._resourceGroups; }
}
private IResourceOperations _resources;
/// <summary>
/// Operations for managing resources.
/// </summary>
public virtual IResourceOperations Resources
{
get { return this._resources; }
}
private IResourceProviderOperationDetailsOperations _resourceProviderOperationDetails;
/// <summary>
/// Operations for managing Resource provider operations.
/// </summary>
public virtual IResourceProviderOperationDetailsOperations ResourceProviderOperationDetails
{
get { return this._resourceProviderOperationDetails; }
}
private ITagOperations _tags;
/// <summary>
/// Operations for managing tags.
/// </summary>
public virtual ITagOperations Tags
{
get { return this._tags; }
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
public ResourceManagementClient()
: base()
{
this._deploymentOperations = new DeploymentOperationOperations(this);
this._deployments = new DeploymentOperations(this);
this._providers = new ProviderOperations(this);
this._providerOperationsMetadata = new ProviderOperationsMetadataOperations(this);
this._resourceGroups = new ResourceGroupOperations(this);
this._resources = new ResourceOperations(this);
this._resourceProviderOperationDetails = new ResourceProviderOperationDetailsOperations(this);
this._tags = new TagOperations(this);
this._apiVersion = "2014-04-01-preview";
this._longRunningOperationInitialTimeout = -1;
this._longRunningOperationRetryTimeout = -1;
this.HttpClient.Timeout = TimeSpan.FromSeconds(300);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='baseUri'>
/// Optional. Gets the URI used as the base for all cloud service
/// requests.
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, Uri baseUri)
: this()
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this._credentials = credentials;
this._baseUri = baseUri;
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials)
: this()
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this._credentials = credentials;
this._baseUri = new Uri("https://management.azure.com/");
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(HttpClient httpClient)
: base(httpClient)
{
this._deploymentOperations = new DeploymentOperationOperations(this);
this._deployments = new DeploymentOperations(this);
this._providers = new ProviderOperations(this);
this._providerOperationsMetadata = new ProviderOperationsMetadataOperations(this);
this._resourceGroups = new ResourceGroupOperations(this);
this._resources = new ResourceOperations(this);
this._resourceProviderOperationDetails = new ResourceProviderOperationDetailsOperations(this);
this._tags = new TagOperations(this);
this._apiVersion = "2014-04-01-preview";
this._longRunningOperationInitialTimeout = -1;
this._longRunningOperationRetryTimeout = -1;
this.HttpClient.Timeout = TimeSpan.FromSeconds(300);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='baseUri'>
/// Optional. Gets the URI used as the base for all cloud service
/// requests.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, Uri baseUri, HttpClient httpClient)
: this(httpClient)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this._credentials = credentials;
this._baseUri = baseUri;
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, HttpClient httpClient)
: this(httpClient)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this._credentials = credentials;
this._baseUri = new Uri("https://management.azure.com/");
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Clones properties from current instance to another
/// ResourceManagementClient instance
/// </summary>
/// <param name='client'>
/// Instance of ResourceManagementClient to clone to
/// </param>
protected override void Clone(ServiceClient<ResourceManagementClient> client)
{
base.Clone(client);
if (client is ResourceManagementClient)
{
ResourceManagementClient clonedClient = ((ResourceManagementClient)client);
clonedClient._credentials = this._credentials;
clonedClient._baseUri = this._baseUri;
clonedClient._apiVersion = this._apiVersion;
clonedClient._longRunningOperationInitialTimeout = this._longRunningOperationInitialTimeout;
clonedClient._longRunningOperationRetryTimeout = this._longRunningOperationRetryTimeout;
clonedClient.Credentials.InitializeServiceClient(clonedClient);
}
}
/// <summary>
/// The Get Operation Status operation returns the status of the
/// specified operation. After calling an asynchronous operation, you
/// can call Get Operation Status to determine whether the operation
/// has succeeded, failed, or is still in progress.
/// </summary>
/// <param name='operationStatusLink'>
/// Required. Location value returned by the Begin operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response for long running operations.
/// </returns>
public async Task<LongRunningOperationResponse> GetLongRunningOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken)
{
// Validate
if (operationStatusLink == null)
{
throw new ArgumentNullException("operationStatusLink");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("operationStatusLink", operationStatusLink);
TracingAdapter.Enter(invocationId, this, "GetLongRunningOperationStatusAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + operationStatusLink;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("x-ms-version", "2014-04-01-preview");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted && statusCode != HttpStatusCode.NoContent)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
LongRunningOperationResponse result = null;
// Deserialize Response
result = new LongRunningOperationResponse();
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (statusCode == HttpStatusCode.Conflict)
{
result.Status = OperationStatus.Failed;
}
if (statusCode == HttpStatusCode.NoContent)
{
result.Status = OperationStatus.Succeeded;
}
if (statusCode == HttpStatusCode.OK)
{
result.Status = OperationStatus.Succeeded;
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
}
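/// <summary>
/// Illustrative sketch, not part of the generated client: shows how a caller might drive the
/// polling entry point above. The configured ResourceManagementClient and the operation status
/// link (as returned by a Begin* call) are assumed to be supplied by the caller.
/// </summary>
internal static class LongRunningOperationPollingSketch
{
internal static async Task<LongRunningOperationResponse> WaitForCompletionAsync(ResourceManagementClient client, string operationStatusLink, CancellationToken cancellationToken)
{
while (true)
{
LongRunningOperationResponse response = await client.GetLongRunningOperationStatusAsync(operationStatusLink, cancellationToken).ConfigureAwait(false);
// Succeeded and Failed are the terminal states surfaced by GetLongRunningOperationStatusAsync.
if (response.Status == OperationStatus.Succeeded || response.Status == OperationStatus.Failed)
{
return response;
}
// Wait for the client's retry timeout between polls; fall back to 30 seconds when it is unset (-1).
int delaySeconds = client.LongRunningOperationRetryTimeout > 0 ? client.LongRunningOperationRetryTimeout : 30;
await Task.Delay(TimeSpan.FromSeconds(delaySeconds), cancellationToken).ConfigureAwait(false);
}
}
}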
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using Microsoft.Scripting.Runtime;
using IronPython.Runtime.Operations;
namespace IronPython.Runtime.Types {
// Used to map signatures to specific targets on the embedded reflected method.
public class BuiltinFunctionOverloadMapper : ICodeFormattable {
private BuiltinFunction _function;
private object _instance;
private PythonTuple _allOverloads; // overloads are stored here and may be bound to an instance
public BuiltinFunctionOverloadMapper(BuiltinFunction builtinFunction, object instance) {
this._function = builtinFunction;
this._instance = instance;
}
public object this[params Type[] types] {
get {
return GetOverload(types, Targets);
}
}
protected object GetOverload(Type[] sig, IList<MethodBase> targets) {
return GetOverload(sig, targets, true);
}
private object GetOverload(Type[] sig, IList<MethodBase> targets, bool wrapCtors) {
// We can still end up with more than one target since generic and non-generic
// methods can share the same name and signature. So we'll build up a new
// reflected method with all the candidate targets. A caller can then index this
// reflected method if necessary in order to provide generic type arguments and
// fully disambiguate the target.
BuiltinFunction bf;
BuiltinFunction.TypeList tl = new BuiltinFunction.TypeList(sig);
lock (_function.OverloadDictionary) {
if (!_function.OverloadDictionary.TryGetValue(tl, out bf)) {
MethodBase[] newTargets = FindMatchingTargets(sig, targets, true);
if (newTargets.Length == 0) {
// If no overload was found, check also using CodeContext for backward compatibility
newTargets = FindMatchingTargets(sig, targets, false);
}
if (newTargets.Length == 0) {
ThrowOverloadException(sig, targets);
} else {
_function.OverloadDictionary[tl] = bf = new BuiltinFunction(_function.Name, newTargets, Function.DeclaringType, _function.FunctionType);
}
}
}
if (_instance != null) {
return bf.BindToInstance(_instance);
} else if (wrapCtors) {
return GetTargetFunction(bf);
} else {
return bf;
}
}
/// <summary>
/// Find matching overloads by checking signature against available targets
/// </summary>
/// <param name="sig">Given signature</param>
/// <param name="targets">List of possible targets</param>
/// <param name="removeCodeContext">If set to true, the method will check whether the first paramter of the
/// target is of the type CodeContext and removes it</param>
/// <returns>Possible overloads</returns>
private static MethodBase[] FindMatchingTargets(Type[] sig, IList<MethodBase> targets, bool removeCodeContext) {
int args = sig.Length;
List<MethodBase> res = new List<MethodBase>();
foreach (MethodBase mb in targets) {
ParameterInfo[] pis = mb.GetParameters();
if (removeCodeContext) {
if (pis.Length > 0 && pis[0].ParameterType == typeof(CodeContext)) {
// Copy array and skip CodeContext
var newPis = new ParameterInfo[pis.Length - 1];
for (int i = 1; i < pis.Length; i++) {
newPis[i - 1] = pis[i];
}
pis = newPis;
}
}
if (pis.Length != args)
continue;
// Check each parameter type for an exact match.
bool match = true;
for (int i = 0; i < args; i++)
if (pis[i].ParameterType != sig[i]) {
match = false;
break;
}
if (!match)
continue;
// Okay, we have a match, add it to the list.
res.Add(mb);
}
return res.ToArray();
}
/// <summary>
/// Throws a formatted exception if no overload matches.
/// </summary>
/// <param name="sig">Passed signature which should be used</param>
/// <param name="targets">Given targets, which does not fit to the signature</param>
/// <example>
/// <code language="cs" title="Cause overload exceptiob"><
/// ]]></code>
/// </example>
public void ThrowOverloadException(Type[] sig, IList<MethodBase> targets) {
// Create info for given signature
System.Text.StringBuilder sigInfo = new System.Text.StringBuilder();
sigInfo.Append((targets.Count > 0 ? targets[0].Name : "") + "[");
foreach (var type in sig) {
if (!sigInfo.ToString().EndsWith("[")) {
sigInfo.Append(", ");
}
sigInfo.Append(type.Name);
}
sigInfo.Append("]");
// Get possible overloads.
System.Text.StringBuilder possibleOverloads = new System.Text.StringBuilder();
foreach (var overload in targets) {
if (possibleOverloads.Length > 0) {
possibleOverloads.Append(", ");
}
possibleOverloads.Append("[");
foreach (var param in overload.GetParameters()) {
if (!possibleOverloads.ToString().EndsWith("[")) {
possibleOverloads.Append(", ");
}
possibleOverloads.Append(param.ParameterType.Name);
}
possibleOverloads.Append("]");
}
throw ScriptingRuntimeHelpers.SimpleTypeError(String.Format("No match found for the method signature {0}. Expected {1}", sigInfo.ToString(),
possibleOverloads.ToString()));
}
public BuiltinFunction Function {
get {
return _function;
}
}
public virtual IList<MethodBase> Targets {
get {
return _function.Targets;
}
}
public PythonTuple Functions {
get {
if (_allOverloads == null) {
object[] res = new object[Targets.Count];
int writeIndex = 0;
foreach (MethodBase mb in Targets) {
ParameterInfo[] pis = mb.GetParameters();
Type[] types = new Type[pis.Length];
for (int i = 0; i < pis.Length; i++) {
types[i] = pis[i].ParameterType;
}
res[writeIndex++] = GetOverload(types, Targets, false);
}
Interlocked.CompareExchange(ref _allOverloads, PythonTuple.MakeTuple(res), null);
}
return _allOverloads;
}
}
protected virtual object GetTargetFunction(BuiltinFunction bf) {
return bf;
}
public virtual string/*!*/ __repr__(CodeContext/*!*/ context) {
PythonDictionary overloadList = new PythonDictionary();
foreach (MethodBase mb in Targets) {
string key = DocBuilder.CreateAutoDoc(mb);
overloadList[key] = Function;
}
return overloadList.__repr__(context);
}
}
public class ConstructorOverloadMapper : BuiltinFunctionOverloadMapper {
public ConstructorOverloadMapper(ConstructorFunction builtinFunction, object instance)
: base(builtinFunction, instance) {
}
public override IList<MethodBase> Targets {
get {
return ((ConstructorFunction)Function).ConstructorTargets;
}
}
protected override object GetTargetFunction(BuiltinFunction bf) {
// Return a function that's bound to the overloads; the user then calls it w/ the
// dynamic type, and the bound function drops the class & calls the overload.
if (bf.Targets[0].DeclaringType != typeof(InstanceOps)) {
return new ConstructorFunction(InstanceOps.OverloadedNew, bf.Targets).BindToInstance(bf);
}
return base.GetTargetFunction(bf);
}
}
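// Illustrative sketch, not part of the IronPython sources: the mappers above back the
// Python-level ".Overloads[...]" syntax. Given a BuiltinFunction (hypothetically obtained from
// a reflected type), indexing with an exact parameter-type list selects the matching overload.
internal static class OverloadMapperUsageSketch {
internal static object SelectStringOverload(BuiltinFunction function) {
BuiltinFunctionOverloadMapper mapper = new BuiltinFunctionOverloadMapper(function, null);
// Picks the overload whose parameters are exactly (string); the mapper raises
// ThrowOverloadException when no target has that signature.
return mapper[typeof(string)];
}
}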
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Xml;
using OpenMetaverse;
using log4net;
using OpenSim.Framework;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.Physics.Manager;
namespace OpenSim.Region.Framework.Scenes.Serialization
{
/// <summary>
/// Static methods to serialize and deserialize scene objects to and from XML
/// </summary>
public class SceneXmlLoader
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
#region old xml format
public static void LoadPrimsFromXml(Scene scene, string fileName, bool newIDS, Vector3 loadOffset)
{
XmlDocument doc = new XmlDocument();
XmlNode rootNode;
if (fileName.StartsWith("http:") || File.Exists(fileName))
{
XmlTextReader reader = new XmlTextReader(fileName);
reader.WhitespaceHandling = WhitespaceHandling.None;
doc.Load(reader);
reader.Close();
rootNode = doc.FirstChild;
foreach (XmlNode aPrimNode in rootNode.ChildNodes)
{
SceneObjectGroup obj = SceneObjectSerializer.FromOriginalXmlFormat(aPrimNode.OuterXml);
if (newIDS)
{
obj.ResetIDs();
}
//if we want this to be an import method then we need new uuids for the object to avoid any clashes
//obj.RegenerateFullIDs();
scene.AddNewSceneObject(obj, true);
}
}
else
{
throw new Exception("Could not open file " + fileName + " for reading");
}
}
public static void SavePrimsToXml(Scene scene, string fileName)
{
FileStream file = new FileStream(fileName, FileMode.Create);
StreamWriter stream = new StreamWriter(file);
int primCount = 0;
stream.WriteLine("<scene>\n");
EntityBase[] entityList = scene.GetEntities();
foreach (EntityBase ent in entityList)
{
if (ent is SceneObjectGroup)
{
stream.WriteLine(SceneObjectSerializer.ToOriginalXmlFormat((SceneObjectGroup)ent));
primCount++;
}
}
stream.WriteLine("</scene>\n");
stream.Close();
file.Close();
}
#endregion
#region XML2 serialization
// Called by archives (save oar)
public static string SaveGroupToXml2(SceneObjectGroup grp, Dictionary<string, object> options)
{
//return SceneObjectSerializer.ToXml2Format(grp);
using (MemoryStream mem = new MemoryStream())
{
using (XmlTextWriter writer = new XmlTextWriter(mem, System.Text.Encoding.UTF8))
{
SceneObjectSerializer.SOGToXml2(writer, grp, options);
writer.Flush();
using (StreamReader reader = new StreamReader(mem))
{
mem.Seek(0, SeekOrigin.Begin);
return reader.ReadToEnd();
}
}
}
}
// Called by scene serializer (save xml2)
public static void SavePrimsToXml2(Scene scene, string fileName)
{
EntityBase[] entityList = scene.GetEntities();
SavePrimListToXml2(entityList, fileName);
}
// Called by scene serializer (save xml2)
public static void SaveNamedPrimsToXml2(Scene scene, string primName, string fileName)
{
m_log.InfoFormat(
"[SERIALISER]: Saving prims with name {0} in xml2 format for region {1} to {2}",
primName, scene.RegionInfo.RegionName, fileName);
EntityBase[] entityList = scene.GetEntities();
List<EntityBase> primList = new List<EntityBase>();
foreach (EntityBase ent in entityList)
{
if (ent is SceneObjectGroup)
{
if (ent.Name == primName)
{
primList.Add(ent);
}
}
}
SavePrimListToXml2(primList.ToArray(), fileName);
}
// Called by REST Application plugin
public static void SavePrimsToXml2(Scene scene, TextWriter stream, Vector3 min, Vector3 max)
{
EntityBase[] entityList = scene.GetEntities();
SavePrimListToXml2(entityList, stream, min, max);
}
// Called here only. Should be private?
public static void SavePrimListToXml2(EntityBase[] entityList, string fileName)
{
FileStream file = new FileStream(fileName, FileMode.Create);
try
{
StreamWriter stream = new StreamWriter(file);
try
{
SavePrimListToXml2(entityList, stream, Vector3.Zero, Vector3.Zero);
}
finally
{
stream.Close();
}
}
finally
{
file.Close();
}
}
// Called here only. Should be private?
public static void SavePrimListToXml2(EntityBase[] entityList, TextWriter stream, Vector3 min, Vector3 max)
{
XmlTextWriter writer = new XmlTextWriter(stream);
int primCount = 0;
stream.WriteLine("<scene>\n");
foreach (EntityBase ent in entityList)
{
if (ent is SceneObjectGroup)
{
SceneObjectGroup g = (SceneObjectGroup)ent;
if (!min.Equals(Vector3.Zero) || !max.Equals(Vector3.Zero))
{
Vector3 pos = g.RootPart.GetWorldPosition();
if (min.X > pos.X || min.Y > pos.Y || min.Z > pos.Z)
continue;
if (max.X < pos.X || max.Y < pos.Y || max.Z < pos.Z)
continue;
}
//stream.WriteLine(SceneObjectSerializer.ToXml2Format(g));
SceneObjectSerializer.SOGToXml2(writer, (SceneObjectGroup)ent, new Dictionary<string,object>());
stream.WriteLine();
primCount++;
}
}
stream.WriteLine("</scene>\n");
stream.Flush();
}
#endregion
#region XML2 deserialization
public static SceneObjectGroup DeserializeGroupFromXml2(string xmlString)
{
return SceneObjectSerializer.FromXml2Format(xmlString);
}
/// <summary>
/// Load prims from the xml2 format
/// </summary>
/// <param name="scene"></param>
/// <param name="fileName"></param>
public static void LoadPrimsFromXml2(Scene scene, string fileName)
{
LoadPrimsFromXml2(scene, new XmlTextReader(fileName), false);
}
/// <summary>
/// Load prims from the xml2 format
/// </summary>
/// <param name="scene"></param>
/// <param name="reader"></param>
/// <param name="startScripts"></param>
public static void LoadPrimsFromXml2(Scene scene, TextReader reader, bool startScripts)
{
LoadPrimsFromXml2(scene, new XmlTextReader(reader), startScripts);
}
/// <summary>
/// Load prims from the xml2 format. This method will close the reader
/// </summary>
/// <param name="scene"></param>
/// <param name="reader"></param>
/// <param name="startScripts"></param>
protected static void LoadPrimsFromXml2(Scene scene, XmlTextReader reader, bool startScripts)
{
XmlDocument doc = new XmlDocument();
reader.WhitespaceHandling = WhitespaceHandling.None;
doc.Load(reader);
reader.Close();
XmlNode rootNode = doc.FirstChild;
ICollection<SceneObjectGroup> sceneObjects = new List<SceneObjectGroup>();
foreach (XmlNode aPrimNode in rootNode.ChildNodes)
{
SceneObjectGroup obj = DeserializeGroupFromXml2(aPrimNode.OuterXml);
// Attach the deserialized group to the scene (assumed to be the intent here; without this
// the loaded prims are never added), then remember it so its scripts can be started below.
scene.AddNewSceneObject(obj, true);
if (startScripts)
sceneObjects.Add(obj);
}
foreach (SceneObjectGroup sceneObject in sceneObjects)
{
sceneObject.CreateScriptInstances(0, true, scene.DefaultScriptEngine, 0);
sceneObject.ResumeScripts();
}
}
#endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.Collections.Concurrent.Tests
{
public class PartitionerStaticTests
{
[Fact]
public static void TestStaticPartitioningIList()
{
RunTestWithAlgorithm(dataSize: 11, partitionCount: 8, algorithm: 0);
RunTestWithAlgorithm(dataSize: 999, partitionCount: 1, algorithm: 0);
RunTestWithAlgorithm(dataSize: 10000, partitionCount: 11, algorithm: 0);
}
[Fact]
public static void TestStaticPartitioningArray()
{
RunTestWithAlgorithm(dataSize: 7, partitionCount: 4, algorithm: 1);
RunTestWithAlgorithm(dataSize: 123, partitionCount: 1, algorithm: 1);
RunTestWithAlgorithm(dataSize: 1000, partitionCount: 7, algorithm: 1);
}
[Fact]
public static void TestLoadBalanceIList()
{
RunTestWithAlgorithm(dataSize: 7, partitionCount: 4, algorithm: 2);
RunTestWithAlgorithm(dataSize: 123, partitionCount: 1, algorithm: 2);
RunTestWithAlgorithm(dataSize: 1000, partitionCount: 7, algorithm: 2);
}
[Fact]
public static void TestLoadBalanceArray()
{
RunTestWithAlgorithm(dataSize: 11, partitionCount: 8, algorithm: 3);
RunTestWithAlgorithm(dataSize: 999, partitionCount: 1, algorithm: 3);
RunTestWithAlgorithm(dataSize: 10000, partitionCount: 11, algorithm: 3);
}
[Fact]
public static void TestLoadBalanceEnumerator()
{
RunTestWithAlgorithm(dataSize: 7, partitionCount: 4, algorithm: 4);
RunTestWithAlgorithm(dataSize: 123, partitionCount: 1, algorithm: 4);
RunTestWithAlgorithm(dataSize: 1000, partitionCount: 7, algorithm: 4);
}
#region Dispose tests. The dispose logic of PartitionerStatic
// In the official dev unit test run, this test should be commented out
// - Each time we call GetDynamicPartitions method, we create an internal "reader enumerator" to read the
// source data, and we need to make sure that whenever the object returned by GetDynamicPartitions is disposed,
// the "reader enumerator" is also disposed.
[Fact]
public static void TestDisposeException()
{
var data = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
var enumerable = new DisposeTrackingEnumerable<int>(data);
var partitioner = Partitioner.Create(enumerable);
var partition = partitioner.GetDynamicPartitions();
IDisposable d = partition as IDisposable;
Assert.NotNull(d);
d.Dispose();
Assert.Throws<ObjectDisposedException>(() => { var enum1 = partition.GetEnumerator(); });
}
/// <summary>
/// Race in Partitioner's dynamic partitioning Dispose logic
/// After the fix, the partitioner created through Partitioner.Create(IEnumerable) has the following behavior:
/// 1. reference counting in static partitioning. All partitions need to be disposed explicitly
/// 2. no reference counting in dynamic partitioning. The partitioner needs to be disposed explicitly
/// </summary>
/// <returns></returns>
[Fact]
public static void RunDynamicPartitioningDispose()
{
var p = Partitioner.Create(new int[] { 0, 1 });
var d = p.GetDynamicPartitions();
using (var e = d.GetEnumerator())
{
while (e.MoveNext()) { }
}
// should not throw
using (var e = d.GetEnumerator()) { };
}
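// Added illustrative sketch (not one of the original test cases): the counterpart of the
// dispose contract documented above for *static* partitioning over an IList -- reference
// counting means each partition enumerator has to be disposed explicitly by its consumer.
// Partitioner.Create and GetPartitions below are the standard public API; this method is
// only an example and is not invoked by the tests.
private static void StaticPartitioningDisposeSketch()
{
var partitioner = Partitioner.Create((IList<int>)new[] { 1, 2, 3, 4 }, loadBalance: false);
var partitions = partitioner.GetPartitions(2);
foreach (var partition in partitions)
{
using (partition) // dispose each partition enumerator explicitly
{
while (partition.MoveNext()) { } // drain the partition
}
}
}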
#endregion
[Fact]
public static void TestExceptions()
{
// Testing ArgumentNullException with data==null
// Test ArgumentNullException of source data
OrderablePartitioner<int> partitioner;
for (int algorithm = 0; algorithm < 5; algorithm++)
{
Assert.Throws<ArgumentNullException>(() => { partitioner = PartitioningWithAlgorithm<int>(null, algorithm); });
}
// Test NotSupportedException of Reset: already tested in RunTestWithAlgorithm
// Test InvalidOperationException: already tested in TestPartitioningCore
// Test ArgumentOutOfRangeException of partitionCount == 0
int[] data = new int[10000];
for (int i = 0; i < 10000; i++)
data[i] = i;
//test GetOrderablePartitions method for 0-4 algorithms, try to catch ArgumentOutOfRangeException
for (int algorithm = 0; algorithm < 5; algorithm++)
{
partitioner = PartitioningWithAlgorithm<int>(data, algorithm);
Assert.Throws<ArgumentOutOfRangeException>(() => { var partitions1 = partitioner.GetOrderablePartitions(0); });
}
}
[Fact]
public static void TestEmptyPartitions()
{
int[] data = new int[0];
// Test ArgumentNullException of source data
OrderablePartitioner<int> partitioner;
for (int algorithm = 0; algorithm < 5; algorithm++)
{
partitioner = PartitioningWithAlgorithm<int>(data, algorithm);
//test GetOrderablePartitions
var partitions1 = partitioner.GetOrderablePartitions(4);
//verify all partitions are empty
for (int i = 0; i < 4; i++)
{
Assert.False(partitions1[i].MoveNext(), "Should not be able to move next in an empty partition.");
}
//test GetOrderableDynamicPartitions
try
{
var partitions2 = partitioner.GetOrderableDynamicPartitions();
//verify all partitions are empty
var newPartition = partitions2.GetEnumerator();
Assert.False(newPartition.MoveNext(), "Should not be able to move next in an empty partition.");
}
catch (NotSupportedException)
{
Assert.True(IsStaticPartition(algorithm), "TestEmptyPartitions: IsStaticPartition(algorithm) should have been true.");
}
}
}
private static void RunTestWithAlgorithm(int dataSize, int partitionCount, int algorithm)
{
//we set up the KeyValuePair in the way that keys and values should always be the same
//for all partitioning algorithms. So that we can use a bitmap (boolarray) to check whether
//any elements are missing in the end.
int[] data = new int[dataSize];
for (int i = 0; i < dataSize; i++)
data[i] = i;
IEnumerator<KeyValuePair<long, int>>[] partitionsUnderTest = new IEnumerator<KeyValuePair<long, int>>[partitionCount];
//step 1: test GetOrderablePartitions
OrderablePartitioner<int> partitioner = PartitioningWithAlgorithm<int>(data, algorithm);
var partitions1 = partitioner.GetOrderablePartitions(partitionCount);
//convert it to partition array for testing
for (int i = 0; i < partitionCount; i++)
partitionsUnderTest[i] = partitions1[i];
Assert.Equal(partitionCount, partitions1.Count);
TestPartitioningCore(dataSize, partitionCount, data, IsStaticPartition(algorithm), partitionsUnderTest);
//step 2: test GetOrderableDynamicPartitions
bool gotException = false;
try
{
var partitions2 = partitioner.GetOrderableDynamicPartitions();
for (int i = 0; i < partitionCount; i++)
partitionsUnderTest[i] = partitions2.GetEnumerator();
TestPartitioningCore(dataSize, partitionCount, data, IsStaticPartition(algorithm), partitionsUnderTest);
}
catch (NotSupportedException)
{
//swallow this exception: static partitioning doesn't support GetOrderableDynamicPartitions
gotException = true;
}
Assert.False(IsStaticPartition(algorithm) && !gotException, "TestLoadBalanceIList: Failure: didn't catch \"NotSupportedException\" for static partitioning");
}
private static OrderablePartitioner<T> PartitioningWithAlgorithm<T>(T[] data, int algorithm)
{
switch (algorithm)
{
//static partitioning through IList
case (0):
return Partitioner.Create((IList<T>)data, false);
//static partitioning through Array
case (1):
return Partitioner.Create(data, false);
//dynamic partitioning through IList
case (2):
return Partitioner.Create((IList<T>)data, true);
//dynamic partitioning through Array
case (3):
return Partitioner.Create(data, true);
//dynamic partitioning through IEnumerator
case (4):
return Partitioner.Create((IEnumerable<T>)data);
default:
throw new InvalidOperationException("PartitioningWithAlgorithm: no such partitioning algorithm");
}
}
private static void TestPartitioningCore(int dataSize, int partitionCount, int[] data, bool staticPartitioning,
IEnumerator<KeyValuePair<long, int>>[] partitions)
{
bool[] boolarray = new bool[dataSize];
bool keysOrderedWithinPartition = true,
keysOrderedAcrossPartitions = true;
int enumCount = 0; //count how many elements are enumerated by all partitions
Task[] threadArray = new Task[partitionCount];
for (int i = 0; i < partitionCount; i++)
{
int my_i = i;
threadArray[i] = Task.Run(() =>
{
int localOffset = 0;
int lastElement = -1;
//variables to compute key/value consistency for static partitioning.
int quotient, remainder;
quotient = dataSize / partitionCount;
remainder = dataSize % partitionCount;
Assert.Throws<InvalidOperationException>(() => { var temp = partitions[my_i].Current; });
while (partitions[my_i].MoveNext())
{
int key = (int)partitions[my_i].Current.Key,
value = partitions[my_i].Current.Value;
Assert.Equal(key, value);
boolarray[key] = true;
Interlocked.Increment(ref enumCount);
//todo: check if keys are ordered increasingly within each partition.
keysOrderedWithinPartition &= (lastElement >= key);
lastElement = key;
//Only check this with static partitioning
//check keys are ordered across the partitions
if (staticPartitioning)
{
int originalPosition;
if (my_i < remainder)
originalPosition = localOffset + my_i * (quotient + 1);
else
originalPosition = localOffset + remainder * (quotient + 1) + (my_i - remainder) * quotient;
keysOrderedAcrossPartitions &= originalPosition == value;
}
localOffset++;
}
}
);
}
Task.WaitAll(threadArray);
if (keysOrderedWithinPartition)
Console.WriteLine("TestPartitioningCore: Keys are not strictly ordered within each partition");
// Only check this with static partitioning
//check keys are ordered across the partitions
Assert.False(staticPartitioning && !keysOrderedAcrossPartitions, "TestPartitioningCore: Keys are not strictly ordered across partitions");
//check data count
Assert.Equal(enumCount, dataSize);
//check if any elements are missing
foreach (var item in boolarray)
{
Assert.True(item);
}
}
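// Added illustrative helper (not used by the tests above): the original-position formula that the
// static-partitioning check in TestPartitioningCore relies on, extracted for clarity. The first
// `remainder` partitions each receive `quotient + 1` elements, the rest receive `quotient`.
// For example, dataSize = 10 and partitionCount = 4 give quotient = 2 and remainder = 2, so the
// partitions start at original indices 0, 3, 6 and 8.
private static int StaticPartitionStartIndex(int dataSize, int partitionCount, int partitionIndex)
{
int quotient = dataSize / partitionCount;
int remainder = dataSize % partitionCount;
return partitionIndex < remainder
? partitionIndex * (quotient + 1)
: remainder * (quotient + 1) + (partitionIndex - remainder) * quotient;
}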
//
// Try calling MoveNext on a Partitioner enumerator after that enumerator has already returned false.
//
[Fact]
public static void TestExtraMoveNext()
{
Partitioner<int>[] partitioners = new[]
{
Partitioner.Create(new int[] { 0 , 1, 2, 3, 4, 5}),
Partitioner.Create(new int[] { 0 , 1, 2, 3, 4, 5}, false),
Partitioner.Create(new int[] { 0 , 1, 2, 3, 4, 5}, true),
Partitioner.Create(new int[] { 0 }),
Partitioner.Create(new int[] { 0 }, false),
Partitioner.Create(new int[] { 0 }, true),
};
for (int i = 0; i < partitioners.Length; i++)
{
using (var ee = partitioners[i].GetPartitions(1)[0])
{
while (ee.MoveNext()) { }
Assert.False(ee.MoveNext(), "TestExtraMoveNext: FAILED. Partitioner " + i + ": First extra MoveNext expected to return false.");
Assert.False(ee.MoveNext(), "TestExtraMoveNext: FAILED. Partitioner " + i + ": Second extra MoveNext expected to return false.");
Assert.False(ee.MoveNext(), "TestExtraMoveNext: FAILED. Partitioner " + i + ": Third extra MoveNext expected to return false.");
}
}
}
#region Helper Methods / Classes
private class DisposeTrackingEnumerable<T> : IEnumerable<T>
{
protected IEnumerable<T> m_data;
List<DisposeTrackingEnumerator<T>> s_enumerators = new List<DisposeTrackingEnumerator<T>>();
public DisposeTrackingEnumerable(IEnumerable<T> enumerable)
{
m_data = enumerable;
}
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
DisposeTrackingEnumerator<T> walker = new DisposeTrackingEnumerator<T>(m_data.GetEnumerator());
lock (s_enumerators)
{
s_enumerators.Add(walker);
}
return walker;
}
public IEnumerator<T> GetEnumerator()
{
DisposeTrackingEnumerator<T> walker = new DisposeTrackingEnumerator<T>(m_data.GetEnumerator());
lock (s_enumerators)
{
s_enumerators.Add(walker);
}
return walker;
}
public void AreEnumeratorsDisposed(string scenario)
{
for (int i = 0; i < s_enumerators.Count; i++)
{
Assert.True(s_enumerators[i].IsDisposed(),
String.Format("AreEnumeratorsDisposed: FAILED. enumerator {0} was not disposed for SCENARIO: {1}.", i, scenario));
}
}
}
/// <summary>
/// This is the Enumerator that DisposeTrackingEnumerable generates when GetEnumerator is called.
/// We are simply wrapping an Enumerator and tracking whether Dispose had been called or not.
/// </summary>
/// <typeparam name="T">The type of the element</typeparam>
private class DisposeTrackingEnumerator<T> : IEnumerator<T>
{
IEnumerator<T> m_elements;
bool disposed;
public DisposeTrackingEnumerator(IEnumerator<T> enumerator)
{
m_elements = enumerator;
disposed = false;
}
public Boolean MoveNext()
{
return m_elements.MoveNext();
}
public T Current
{
get { return m_elements.Current; }
}
Object System.Collections.IEnumerator.Current
{
get { return m_elements.Current; }
}
/// <summary>
/// Disposes the underlying Enumerator and suppresses finalization
/// so that we will not throw.
/// </summary>
public void Dispose()
{
GC.SuppressFinalize(this);
m_elements.Dispose();
disposed = true;
}
public void Reset()
{
m_elements.Reset();
}
public bool IsDisposed()
{
return disposed;
}
}
private static bool IsStaticPartition(int algorithm)
{
return algorithm < 2;
}
#endregion
}
}
| |
using CrystalDecisions.CrystalReports.Engine;
using CrystalDecisions.Windows.Forms;
using DpSdkEngLib;
using DPSDKOPSLib;
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Diagnostics;
using System.Windows.Forms;
using System.Linq;
using System.Xml.Linq;
// ERROR: Not supported in C#: OptionDeclaration
using VB = Microsoft.VisualBasic;
namespace _4PosBackOffice.NET
{
internal partial class frmPricingMatrix : System.Windows.Forms.Form
{
bool loading;
int gID;
object[] gDataArray;
List<Label> lblColorFixed = new List<Label>();
List<Label> lblcolor = new List<Label>();
List<TextBox> txtUnit = new List<TextBox>();
List<TextBox> txtCase = new List<TextBox>();
private void loadLanguage()
{
//frmPricingMatrix = No Code [Detailed Pricing Matrices]
//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
//If rsLang.RecordCount Then frmPricingMatrix.Caption = rsLang("LanguageLayoutLnk_Description"): frmPricingMatrix.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
//Function Unknown!
//Command1 = No Code []
//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
//If rsLang.RecordCount Then Command1.Caption = rsLang("LanguageLayoutLnk_Description"): Command1.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1085;
//Print|Checked
if (modRecordSet.rsLang.RecordCount > 0) {
cmdPrint.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;
cmdPrint.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;
}
modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1004;
//Exit|Checked
if (modRecordSet.rsLang.RecordCount > 0) {
cmdExit.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;
cmdExit.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;
}
//Note: Field is Dynamic with Default as below!
//lblHeading = No Code [Default Pricing Matrix for each Channel]
//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
//If rsLang.RecordCount Then lblHeading.Caption = rsLang("LanguageLayoutLnk_Description"): lblHeading.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
//_lbl_2 = No Code [Unit Markup]
//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
//If rsLang.RecordCount Then _lbl_2.Caption = rsLang("LanguageLayoutLnk_Description"): _lbl_2.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
//_lbl_5 = No Code [Case/Carton Markup]
//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
//If rsLang.RecordCount Then _lbl_5.Caption = rsLang("LanguageLayoutLnk_Description"): _lbl_5.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
//lblPricingGroup(1) = No Code
//lblPricingGroup(2) = No Code
//lblPricingGroup(3) = No Code
//lblPricingGroup(4) = No Code
//lblPricingGroup(5) = No Code
//lblPricingGroup(6) = No Code
//lblPricingGroup(7) = No Code
//lblPricingGroup(8) = No Code
modRecordSet.rsHelp.filter = "Help_Section=0 AND Help_Form='" + this.Name + "'";
//UPGRADE_ISSUE: Form property frmPricingMatrix.ToolTip1 was not upgraded. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="CC4C7EC0-C903-48FC-ACCC-81861D12DA4A"'
if (modRecordSet.rsHelp.RecordCount > 0)
this.ToolTip1 = modRecordSet.rsHelp.Fields("Help_ContextID").Value;
}
private void setup()
{
ADODB.Recordset rs = default(ADODB.Recordset);
short x = 0;
string sqlChannels = "SELECT Channel_Code FROM Channel ORDER BY ChannelID";
rs = modRecordSet.getRS(ref sqlChannels);
var _with1 = gridItem;
_with1.RowCount = 2;
_with1.Col = 10;
// Set FixedRows to 0 so you can use AddItem. You can't
// use AddItem with a FixedRow. You have to use the Text property
_with1.FixedRows = 0;
//.MergeCells = MSFlexGridLib.MergeCellsSettings.flexMergeFree 'Set the MergeCells property.
_with1.row = 0;
_with1.Col = 0;
_with1.set_ColWidth(0, 1300);
_with1.set_ColWidth(1, 700);
_with1.set_ColWidth(2, 700);
_with1.set_ColWidth(3, 700);
_with1.set_ColWidth(4, 700);
_with1.set_ColWidth(5, 700);
_with1.set_ColWidth(6, 700);
_with1.set_ColWidth(7, 700);
_with1.set_ColWidth(8, 700);
_with1.set_ColWidth(9, 700);
_with1.set_TextMatrix(0, 0, "Pack");
_with1.set_TextMatrix(0, 0, "QTY");
rs.MoveFirst();
for (x = 0; x <= 7; x++) {
_with1.set_TextMatrix(0, x + 2, rs.Fields("Channel_Code").Value);
rs.MoveNext();
}
_with1.FixedCols = 2;
_with1.FixedRows = 1;
}
private void setup_NEW()
{
ADODB.Recordset rs = default(ADODB.Recordset);
short x = 0;
string sqlChannels = "SELECT Channel_Code FROM Channel ORDER BY ChannelID";
rs = modRecordSet.getRS(ref sqlChannels);
var _with2 = gridItem;
_with2.RowCount = 2;
_with2.Col = 10;
// Set FixedRows to 0 so you can use AddItem. You can't
// use AddItem with a FixedRow. You have to use the Text property
_with2.FixedRows = 0;
//.MergeCells = MSFlexGridLib.MergeCellsSettings.flexMergeFree 'Set the MergeCells property.
_with2.row = 0;
_with2.Col = 0;
_with2.set_ColWidth(0, 1300);
_with2.set_ColWidth(1, 700);
_with2.set_ColWidth(2, 700);
_with2.set_ColWidth(3, 700);
_with2.set_ColWidth(4, 700);
_with2.set_ColWidth(5, 700);
_with2.set_ColWidth(6, 700);
_with2.set_ColWidth(7, 700);
_with2.set_ColWidth(8, 700);
_with2.set_ColWidth(9, 700);
_with2.set_TextMatrix(0, 0, "Pack");
//.TextMatrix(0, 0) = "SHRINKS"
_with2.set_TextMatrix(0, 0, " ");
_with2.set_TextMatrix(0, 1, "QTY");
rs.MoveFirst();
for (x = 0; x <= 7; x++) {
_with2.set_TextMatrix(0, x + 2, rs.Fields("Channel_Code").Value);
rs.MoveNext();
}
_with2.FixedCols = 2;
_with2.FixedRows = 1;
}
public void loadMatrix(ref int id)
{
loading = true;
int x = 0;
int lCNT = 0;
short lDepositQuantity = 0;
ADODB.Recordset rsMaster = default(ADODB.Recordset);
ADODB.Recordset rs = default(ADODB.Recordset);
int lPackSizeID = 0;
bool lColour = false;
gID = id;
setup();
rs = modRecordSet.getRS(ref "SELECT PricingGroup.* From PricingGroup WHERE (((PricingGroup.PricingGroupID)=" + gID + "));");
if (rs.BOF | rs.EOF) {
this.Close();
System.Environment.Exit(0);
} else {
lblHeading.Text = rs.Fields("PricingGroup_Name").Value;
for (x = 1; x <= 8; x++) {
txtUnit[x].Text = Strings.FormatNumber(rs.Fields("PricingGroup_Unit" + x).Value, 2);
txtCase[x].Text = Strings.FormatNumber(rs.Fields("PricingGroup_Case" + x).Value, 2);
}
}
rs.Close();
rsMaster = modRecordSet.getRS(ref "SELECT shrink.PricingGroup, PackSize.PackSizeID, PackSize.PackSize_Name, shrink.Quantity FROM [SELECT StockItem.StockItem_PricingGroupID AS PricingGroup, ShrinkItem.ShrinkItem_Quantity AS Quantity, StockItem.StockItem_PackSizeID AS PackSizeID FROM StockItem INNER JOIN ShrinkItem ON StockItem.StockItem_ShrinkID = ShrinkItem.ShrinkItem_ShrinkID GROUP BY StockItem.StockItem_PricingGroupID, ShrinkItem.ShrinkItem_Quantity, StockItem.StockItem_PackSizeID]. AS shrink LEFT JOIN PackSize ON shrink.PackSizeID = PackSize.PackSizeID Where (((shrink.PricingGroup) = " + gID + ")) ORDER BY PackSize.PackSize_Volume, PackSize.PackSizeID, shrink.Quantity;");
txtEdit.Visible = false;
var _with3 = gridItem;
_with3.RowCount = 1;
_with3.Visible = false;
lCNT = -1;
while (!(rsMaster.EOF)) {
if (lPackSizeID != rsMaster.Fields("PackSizeID").Value) {
lColour = !lColour;
_with3.Rows.Add(rsMaster.Fields("PackSize_Name").Value);
lPackSizeID = rsMaster.Fields("PackSizeID").Value;
} else {
_with3.Rows.Add("");
}
_with3.row = _with3.RowCount - 1;
_with3.Col = 0;
_with3.CellBackColor = lblColorFixed[System.Math.Abs(Convert.ToInt16(lColour))].BackColor;
_with3.Col = 1;
_with3.CellBackColor = lblColorFixed[System.Math.Abs(Convert.ToInt16(lColour))].BackColor;
_with3.Text = rsMaster.Fields("quantity").Value;
_with3.set_RowData(_with3.row, rsMaster.Fields("PackSizeID").Value);
for (x = 1; x <= 8; x++) {
//UPGRADE_WARNING: Couldn't resolve default property of object x. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
_with3.Col = x + 1;
//UPGRADE_WARNING: Couldn't resolve default property of object x. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
string sqlMarkup = "SELECT PricingGroupChannelLnk.PricingGroupChannelLnk_Markup From PricingGroupChannelLnk WHERE (((PricingGroupChannelLnk.PricingGroupChannelLnk_PricingGroupID)=" + gID + ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_PackSizeID)=" + rsMaster.Fields("PackSizeID").Value + ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_Quantity)=" + rsMaster.Fields("quantity").Value + ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_ChannelID)=" + x + "));";
rs = modRecordSet.getRS(ref sqlMarkup);
if (rs.BOF | rs.EOF) {
_with3.CellBackColor = lblcolor[System.Math.Abs(Convert.ToInt16(lColour))].BackColor;
if (rsMaster.Fields("quantity").Value == 1) {
_with3.Text = txtUnit[x].Text;
} else {
_with3.Text = txtCase[x].Text;
}
} else {
_with3.CellBackColor = lblColorChanged.BackColor;
_with3.Text = Strings.FormatNumber(rs.Fields("PricingGroupChannelLnk_Markup").Value, 2);
}
rs.Close();
}
rsMaster.MoveNext();
}
_with3.Visible = true;
loading = false;
if (gridItem.RowCount > 1) {
gridItem.row = 1;
gridItem.Col = 2;
} else {
txtEdit.Visible = false;
}
loading = false;
}
public void loadMatrix_NEW(ref int id)
{
loading = true;
int x = 0;
int lCNT = 0;
short lDepositQuantity = 0;
ADODB.Recordset rsMaster = default(ADODB.Recordset);
ADODB.Recordset rs = default(ADODB.Recordset);
int lPackSizeID = 0;
bool lColour = false;
gID = id;
setup();
rs = modRecordSet.getRS(ref "SELECT PricingGroup.* From PricingGroup WHERE (((PricingGroup.PricingGroupID)=" + gID + "));");
if (rs.BOF | rs.EOF) {
this.Close();
System.Environment.Exit(0);
} else {
lblHeading.Text = rs.Fields("PricingGroup_Name").Value;
for (x = 1; x <= 8; x++) {
//UPGRADE_WARNING: Couldn't resolve default property of object x. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
txtUnit[x].Text = Strings.FormatNumber(rs.Fields("PricingGroup_Unit" + x).Value, 2);
//UPGRADE_WARNING: Couldn't resolve default property of object x. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
txtCase[x].Text = Strings.FormatNumber(rs.Fields("PricingGroup_Case" + x).Value, 2);
}
}
rs.Close();
//Old PM
//Set rsMaster = getRS("SELECT shrink.PricingGroup, PackSize.PackSizeID, PackSize.PackSize_Name, shrink.Quantity FROM [SELECT StockItem.StockItem_PricingGroupID AS PricingGroup, ShrinkItem.ShrinkItem_Quantity AS Quantity, StockItem.StockItem_PackSizeID AS PackSizeID FROM StockItem INNER JOIN ShrinkItem ON StockItem.StockItem_ShrinkID = ShrinkItem.ShrinkItem_ShrinkID GROUP BY StockItem.StockItem_PricingGroupID, ShrinkItem.ShrinkItem_Quantity, StockItem.StockItem_PackSizeID]. AS shrink LEFT JOIN PackSize ON shrink.PackSizeID = PackSize.PackSizeID Where (((shrink.PricingGroup) = " & gID & ")) ORDER BY PackSize.PackSize_Volume, PackSize.PackSizeID, shrink.Quantity;")
//New PM
rsMaster = modRecordSet.getRS(ref "SELECT PricingGroup.PricingGroupID AS PricingGroup, Shrink.ShrinkID AS ShrinkID, Shrink.Shrink_Name AS Shrink_Name, ShrinkItem.ShrinkItem_Quantity AS Quantity FROM ((Shrink INNER JOIN ShrinkItem ON Shrink.ShrinkID = ShrinkItem.ShrinkItem_ShrinkID) INNER JOIN StockItem ON Shrink.ShrinkID = StockItem.StockItem_ShrinkID) INNER JOIN PricingGroup ON StockItem.StockItem_PricingGroupID = PricingGroup.PricingGroupID GROUP BY PricingGroup.PricingGroupID, Shrink.ShrinkID, Shrink.Shrink_Name, ShrinkItem.ShrinkItem_Quantity HAVING (((PricingGroup.PricingGroupID)=" + gID + ")) ORDER BY ShrinkItem.ShrinkItem_Quantity;");
txtEdit.Visible = false;
var _with4 = gridItem;
_with4.RowCount = 1;
_with4.Visible = false;
//UPGRADE_WARNING: Couldn't resolve default property of object lCNT. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
lCNT = -1;
while (!(rsMaster.EOF)) {
if (_with4.row > 0) {
for (x = 0; x <= _with4.row; x++) {
int lQtyCol = 1;
if (gridItem.get_TextMatrix(ref x, ref lQtyCol) == Convert.ToString(rsMaster.Fields("quantity").Value))
goto skipRow;
}
}
//If lPackSizeID <> rsMaster("PackSizeID") Then
if (lPackSizeID != rsMaster.Fields("ShrinkID").Value) {
lColour = !lColour;
//.AddItem rsMaster("PackSize_Name")
//.AddItem rsMaster("Shrink_Name")
_with4.Rows.Add("");
//lPackSizeID = rsMaster("PackSizeID")
lPackSizeID = rsMaster.Fields("ShrinkID").Value;
} else {
_with4.Rows.Add("");
}
_with4.row = _with4.RowCount - 1;
_with4.Col = 0;
_with4.CellBackColor = lblColorFixed[System.Math.Abs(Convert.ToInt16(lColour))].BackColor;
_with4.Col = 1;
_with4.CellBackColor = lblColorFixed[System.Math.Abs(Convert.ToInt16(lColour))].BackColor;
_with4.Text = rsMaster.Fields("quantity").Value;
//.RowData(.row) = rsMaster("PackSizeID")
_with4.set_RowData(_with4.row, rsMaster.Fields("ShrinkID").Value);
for (x = 1; x <= 8; x++) {
//UPGRADE_WARNING: Couldn't resolve default property of object x. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
_with4.Col = x + 1;
//Set rs = getRS("SELECT PricingGroupChannelLnk.PricingGroupChannelLnk_Markup From PricingGroupChannelLnk WHERE (((PricingGroupChannelLnk.PricingGroupChannelLnk_PricingGroupID)=" & gID & ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_PackSizeID)=" & rsMaster("PackSizeID") & ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_Quantity)=" & rsMaster("quantity") & ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_ChannelID)=" & x & "));")
//Set rs = getRS("SELECT PricingGroupChannelLnk.PricingGroupChannelLnk_Markup From PricingGroupChannelLnk WHERE (((PricingGroupChannelLnk.PricingGroupChannelLnk_PricingGroupID)=" & gID & ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_PackSizeID)=" & rsMaster("ShrinkID") & ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_Quantity)=" & rsMaster("quantity") & ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_ChannelID)=" & x & "));")
//UPGRADE_WARNING: Couldn't resolve default property of object x. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
string sqlMarkup = "SELECT PricingGroupChannelLnk.PricingGroupChannelLnk_Markup From PricingGroupChannelLnk WHERE (((PricingGroupChannelLnk.PricingGroupChannelLnk_PricingGroupID)=" + gID + ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_Quantity)=" + rsMaster.Fields("quantity").Value + ") AND ((PricingGroupChannelLnk.PricingGroupChannelLnk_ChannelID)=" + x + "));";
rs = modRecordSet.getRS(ref sqlMarkup);
if (rs.BOF | rs.EOF) {
_with4.CellBackColor = lblcolor[System.Math.Abs(Convert.ToInt16(lColour))].BackColor;
if (rsMaster.Fields("quantity").Value == 1) {
_with4.Text = txtUnit[x].Text;
} else {
_with4.Text = txtCase[x].Text;
}
} else {
_with4.CellBackColor = lblColorChanged.BackColor;
_with4.Text = Strings.FormatNumber(rs.Fields("PricingGroupChannelLnk_Markup").Value, 2);
}
rs.Close();
}
skipRow:
rsMaster.MoveNext();
}
_with4.Visible = true;
loading = false;
if (gridItem.RowCount > 1) {
gridItem.row = 1;
gridItem.Col = 2;
} else {
txtEdit.Visible = false;
}
loading = false;
}
private void save()
{
int lCol = 0;
int colorInActive = 0;
string sql = null;
decimal lAmountDefault = default(decimal);
short lQuantity = 0;
if (loading)
return;
int lRow = gridItem.row;
int lQtyCol = 1;
if (lRow > 0) {
if (string.IsNullOrEmpty(gridItem.get_TextMatrix(ref lRow, ref lQtyCol)))
return;
lQuantity = Convert.ToInt16(gridItem.get_TextMatrix(ref lRow, ref lQtyCol));
if (lQuantity > 1) {
lAmountDefault = Convert.ToDecimal(this.txtCase[this.gridItem.Col - 1].Text);
} else {
lAmountDefault = Convert.ToDecimal(this.txtUnit[this.gridItem.Col - 1].Text);
}
if (txtEdit.Text != txtEdit.Tag) {
txtEdit.Tag = txtEdit.Text;
if (string.IsNullOrEmpty(txtEdit.Text)) {
txtEdit.Text = Strings.FormatNumber(lAmountDefault * 100, 0, TriState.False, TriState.False, TriState.False);
if (string.IsNullOrEmpty(txtEdit.Text)) {
txtEdit.Text = "0";
}
}
modRecordSet.cnnDB.Execute("INSERT INTO tempStockItem ( tempStockItemID ) SELECT StockItem.StockItemID FROM StockItem LEFT JOIN tempStockItem ON StockItem.StockItemID = tempStockItem.tempStockItemID WHERE (((StockItem.StockItem_PricingGroupID)=" + gID + ") AND ((tempStockItem.tempStockItemID) Is Null) AND ((StockItem.StockItem_PackSizeID)=" + gridItem.get_RowData(ref gridItem.row) + "));");
gridItem.Text = Strings.FormatNumber(Convert.ToDouble(txtEdit.Text) / 100, 2, TriState.False, TriState.False, TriState.False);
sql = "DELETE FROM PricingGroupChannelLnk WHERE (PricingGroupChannelLnk_PricingGroupID = " + gID + ") AND (PricingGroupChannelLnk_PackSizeID = " + gridItem.get_RowData(ref gridItem.row) + ") AND (PricingGroupChannelLnk_Quantity = " + lQuantity + ") AND (PricingGroupChannelLnk_ChannelID = " + gridItem.Col - 1 + ")";
modRecordSet.cnnDB.Execute(sql);
if (lAmountDefault == Convert.ToDecimal(gridItem.Text)) {
//UPGRADE_WARNING: Couldn't resolve default property of object colorInActive. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
gridItem.CellBackColor = System.Drawing.ColorTranslator.FromOle(colorInActive);
loading = true;
//UPGRADE_WARNING: Couldn't resolve default property of object lCol. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
lCol = gridItem.Col;
gridItem.Col = 0;
if (System.Drawing.ColorTranslator.ToOle(gridItem.CellBackColor) == System.Drawing.ColorTranslator.ToOle(this.lblColorFixed[0].BackColor)) {
//UPGRADE_WARNING: Couldn't resolve default property of object lCol. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
gridItem.Col = lCol;
gridItem.CellBackColor = this.lblcolor[0].BackColor;
} else {
//UPGRADE_WARNING: Couldn't resolve default property of object lCol. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
gridItem.Col = lCol;
gridItem.CellBackColor = this.lblcolor[1].BackColor;
}
loading = false;
} else {
sql = "INSERT INTO PricingGroupChannelLnk (PricingGroupChannelLnk_PricingGroupID, PricingGroupChannelLnk_PackSizeID, PricingGroupChannelLnk_Quantity, PricingGroupChannelLnk_ChannelID, PricingGroupChannelLnk_Markup) VALUES (" + gID + ", " + gridItem.get_RowData(ref gridItem.row) + ", " + lQuantity + ", " + gridItem.Col - 1 + ", " + gridItem.Text + ");";
modRecordSet.cnnDB.Execute(sql);
gridItem.CellBackColor = this.lblColorChanged.BackColor;
}
sql = "INSERT INTO tempStockItem (tempStockItemID) SELECT StockItem.StockItemID FROM StockItem LEFT JOIN tempStockItem ON StockItem.StockItemID = tempStockItem.tempStockItemID WHERE (((StockItem.StockItem_PackSizeID)=" + gridItem.get_RowData(ref gridItem.row) + ") AND ((StockItem.StockItem_PricingGroupID)=" + gID + ") AND ((tempStockItem.tempStockItemID) Is Null));";
modRecordSet.cnnDB.Execute(sql);
}
}
}
private void save_NEW()
{
int lCol = 0;
int colorInActive = 0;
string sql = null;
decimal lAmountDefault = default(decimal);
short lQuantity = 0;
if (loading)
return;
int lRow = gridItem.row;
int lQtyCol = 1;
if (lRow > 0) {
if (string.IsNullOrEmpty(gridItem.get_TextMatrix(ref lRow, ref lQtyCol)))
return;
lQuantity = Convert.ToInt16(gridItem.get_TextMatrix(ref lRow, ref lQtyCol));
if (lQuantity > 1) {
lAmountDefault = Convert.ToDecimal(this.txtCase[this.gridItem.Col - 1].Text);
} else {
lAmountDefault = Convert.ToDecimal(this.txtUnit[this.gridItem.Col - 1].Text);
}
if (txtEdit.Text != txtEdit.Tag) {
txtEdit.Tag = txtEdit.Text;
if (string.IsNullOrEmpty(txtEdit.Text)) {
txtEdit.Text = Strings.FormatNumber(lAmountDefault * 100, 0, TriState.False, TriState.False, TriState.False);
if (string.IsNullOrEmpty(txtEdit.Text)) {
txtEdit.Text = "0";
}
}
//cnnDB.Execute "INSERT INTO tempStockItem ( tempStockItemID ) SELECT StockItem.StockItemID FROM StockItem LEFT JOIN tempStockItem ON StockItem.StockItemID = tempStockItem.tempStockItemID WHERE (((StockItem.StockItem_PricingGroupID)=" & gID & ") AND ((tempStockItem.tempStockItemID) Is Null) AND ((StockItem.StockItem_PackSizeID)=" & gridItem.RowData(gridItem.row) & "));"
modRecordSet.cnnDB.Execute("INSERT INTO tempStockItem ( tempStockItemID ) SELECT StockItem.StockItemID FROM StockItem LEFT JOIN tempStockItem ON StockItem.StockItemID = tempStockItem.tempStockItemID WHERE (((StockItem.StockItem_PricingGroupID)=" + gID + ") AND ((tempStockItem.tempStockItemID) Is Null));");
gridItem.Text = Strings.FormatNumber(Convert.ToDouble(txtEdit.Text) / 100, 2, TriState.False, TriState.False, TriState.False);
//sql = "DELETE FROM PricingGroupChannelLnk WHERE (PricingGroupChannelLnk_PricingGroupID = " & gID & ") AND (PricingGroupChannelLnk_PackSizeID = " & gridItem.RowData(gridItem.row) & ") AND (PricingGroupChannelLnk_Quantity = " & lQuantity & ") AND (PricingGroupChannelLnk_ChannelID = " & gridItem.Col - 1 & ")"
sql = "DELETE FROM PricingGroupChannelLnk WHERE (PricingGroupChannelLnk_PricingGroupID = " + gID + ") AND (PricingGroupChannelLnk_Quantity = " + lQuantity + ") AND (PricingGroupChannelLnk_ChannelID = " + gridItem.Col - 1 + ")";
modRecordSet.cnnDB.Execute(sql);
if (lAmountDefault == Convert.ToDecimal(gridItem.Text)) {
//UPGRADE_WARNING: Couldn't resolve default property of object colorInActive. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
gridItem.CellBackColor = System.Drawing.ColorTranslator.FromOle(colorInActive);
loading = true;
//UPGRADE_WARNING: Couldn't resolve default property of object lCol. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
lCol = gridItem.Col;
gridItem.Col = 0;
if (System.Drawing.ColorTranslator.ToOle(gridItem.CellBackColor) == System.Drawing.ColorTranslator.ToOle(this.lblColorFixed[0].BackColor)) {
//UPGRADE_WARNING: Couldn't resolve default property of object lCol. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
gridItem.Col = lCol;
gridItem.CellBackColor = this.lblcolor[0].BackColor;
} else {
//UPGRADE_WARNING: Couldn't resolve default property of object lCol. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
gridItem.Col = lCol;
gridItem.CellBackColor = this.lblcolor[1].BackColor;
}
loading = false;
} else {
//sql = "INSERT INTO PricingGroupChannelLnk (PricingGroupChannelLnk_PricingGroupID, PricingGroupChannelLnk_PackSizeID, PricingGroupChannelLnk_Quantity, PricingGroupChannelLnk_ChannelID, PricingGroupChannelLnk_Markup) VALUES (" & gID & ", " & gridItem.RowData(gridItem.row) & ", " & lQuantity & ", " & gridItem.Col - 1 & ", " & gridItem.Text & ");"
sql = "INSERT INTO PricingGroupChannelLnk (PricingGroupChannelLnk_PricingGroupID, PricingGroupChannelLnk_PackSizeID, PricingGroupChannelLnk_Quantity, PricingGroupChannelLnk_ChannelID, PricingGroupChannelLnk_Markup) VALUES (" + gID + ", " + 0 + ", " + lQuantity + ", " + gridItem.Col - 1 + ", " + gridItem.Text + ");";
modRecordSet.cnnDB.Execute(sql);
gridItem.CellBackColor = this.lblColorChanged.BackColor;
}
//sql = "INSERT INTO tempStockItem (tempStockItemID) SELECT StockItem.StockItemID FROM StockItem LEFT JOIN tempStockItem ON StockItem.StockItemID = tempStockItem.tempStockItemID WHERE (((StockItem.StockItem_PackSizeID)=" & gridItem.RowData(gridItem.row) & ") AND ((StockItem.StockItem_PricingGroupID)=" & gID & ") AND ((tempStockItem.tempStockItemID) Is Null));"
sql = "INSERT INTO tempStockItem (tempStockItemID) SELECT StockItem.StockItemID FROM StockItem LEFT JOIN tempStockItem ON StockItem.StockItemID = tempStockItem.tempStockItemID WHERE ((StockItem.StockItem_PricingGroupID)=" + gID + ") AND ((tempStockItem.tempStockItemID) Is Null);";
modRecordSet.cnnDB.Execute(sql);
}
}
}
private void cmdExit_Click(System.Object eventSender, System.EventArgs eventArgs)
{
save();
this.Close();
}
private void cmdPricingGroup_Click()
{
}
private void cmdPrint_Click(System.Object eventSender, System.EventArgs eventArgs)
{
//If frmPrint.loadHTM("PricingGroupMatrix", "id=" & lstPricingGroup.ItemData(lstPricingGroup.ListIndex), "PricingMatrix") Then
// frmPrint.show 1
//Else
// MsgBox "Unable to display report!", vbExclamation, "Report Error"
//End If
modApplication.report_PricingMatrixNew(ref gID);
}
private void cmdStockItem_Click()
{
object lstPricingGroup = null;
object frmPrint = null;
//UPGRADE_WARNING: Couldn't resolve default property of object lstPricingGroup.ListIndex. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
//UPGRADE_WARNING: Couldn't resolve default property of object lstPricingGroup.ItemData. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
//UPGRADE_WARNING: Couldn't resolve default property of object frmPrint.loadHTM. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
if (frmPrint.loadHTM("PricingGroupStockItem", "id=" + lstPricingGroup.ItemData(lstPricingGroup.ListIndex), "PricingGroupStockItem")) {
//UPGRADE_WARNING: Couldn't resolve default property of object frmPrint.show. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
frmPrint.show(1);
} else {
Interaction.MsgBox("Unable to display report!", MsgBoxStyle.Exclamation, "Report Error");
}
}
private void cmdClose_Click()
{
}
private void Command1_Click(System.Object eventSender, System.EventArgs eventArgs)
{
modApplication.report_PricingMatrix(ref gID);
}
private void frmPricingMatrix_KeyPress(System.Object eventSender, System.Windows.Forms.KeyPressEventArgs eventArgs)
{
short KeyAscii = (short)Strings.Asc(eventArgs.KeyChar);
switch (KeyAscii) {
case (short)System.Windows.Forms.Keys.Escape:
KeyAscii = 0;
cmdExit_Click(cmdExit, new System.EventArgs());
break;
}
eventArgs.KeyChar = Strings.Chr(KeyAscii);
if (KeyAscii == 0) {
eventArgs.Handled = true;
}
}
private void frmPricingMatrix_Load(System.Object eventSender, System.EventArgs eventArgs)
{
lblcolor.AddRange(new Label[] {
_lblcolor_0,
_lblcolor_1
});
lblColorFixed.AddRange(new Label[] {
_lblColorFixed_0,
_lblColorFixed_1
});
txtCase.AddRange(new TextBox[] {
_txtCase_1,
_txtCase_2,
_txtCase_3,
_txtCase_4,
_txtCase_5,
_txtCase_6,
_txtCase_7,
_txtCase_8
});
txtUnit.AddRange(new TextBox[] {
_txtUnit_1,
_txtUnit_2,
_txtUnit_3,
_txtUnit_4,
_txtUnit_5,
_txtUnit_6,
_txtUnit_7,
_txtUnit_8
});
loadLanguage();
}
//Handles gridItem.EnterCell
private void gridItem_EnterCell(System.Object eventSender, System.EventArgs eventArgs)
{
if (loading)
return;
var _with5 = gridItem;
if (_with5.row == 0) {
return;
}
txtEdit.SetBounds(sizeConvertors.twipsToPixels(sizeConvertors.pixelToTwips(_with5.Left, true) + _with5.CellLeft, true), sizeConvertors.twipsToPixels(sizeConvertors.pixelToTwips(_with5.Top, false) + _with5.CellTop, false), sizeConvertors.twipsToPixels(_with5.CellWidth, true), sizeConvertors.twipsToPixels(_with5.CellHeight, false));
txtEdit.Text = Strings.Replace(_with5.Text, ".", "");
if (txtEdit.Text == "000")
txtEdit.Text = "0";
txtEdit.Tag = txtEdit.Text;
txtEdit.Visible = true;
if (this.Visible)
txtEdit.Focus();
}
private void gridItem_Enter(System.Object eventSender, System.EventArgs eventArgs)
{
txtEdit_Enter(txtEdit, new System.EventArgs());
}
private void gridItem_KeyPress(System.Object eventSender, KeyPressEventArgs eventArgs)
{
int direction = 0;
direction = 0;
switch (eventArgs.KeyChar) {
case (char)40: // Strings.ChrW(40)
eventArgs.KeyChar = Strings.ChrW(0);
break;
}
}
//Handles gridItem.LeaveCell
private void gridItem_LeaveCell(System.Object eventSender, System.EventArgs eventArgs)
{
save();
}
private void lblHeading_Click(System.Object eventSender, System.EventArgs eventArgs)
{
modApplication.report_PricingMatrix(ref gID);
}
private void txtEdit_Enter(System.Object eventSender, System.EventArgs eventArgs)
{
txtEdit.SelectionStart = 0;
txtEdit.SelectionLength = 999;
}
private void txtEdit_KeyDown(System.Object eventSender, System.Windows.Forms.KeyEventArgs eventArgs)
{
short KeyCode = (short)eventArgs.KeyCode;
short Shift = (short)((int)eventArgs.KeyData / 0x10000);
bool bDoNotEdit = false;
var _with6 = this.gridItem;
switch (KeyCode) {
case 27:
//ESC
txtEdit.Visible = false;
_with6.Focus();
break;
case 13:
//ENTER
_with6.Focus();
System.Windows.Forms.Application.DoEvents();
int direction = 1;
moveNext(ref direction);
break;
case 37:
//Left arrow
if (txtEdit.SelectionStart == 0 & txtEdit.SelectionLength == 0 | txtEdit.SelectedText == txtEdit.Text) {
_with6.Focus();
System.Windows.Forms.Application.DoEvents();
if (_with6.Col > _with6.FixedCols) {
bDoNotEdit = true;
_with6.Col = _with6.Col - 1;
bDoNotEdit = false;
}
}
break;
case 39:
//Right arrow
if (txtEdit.SelectionStart == Strings.Len(txtEdit.Text) | txtEdit.SelectedText == txtEdit.Text) {
_with6.Focus();
System.Windows.Forms.Application.DoEvents();
if (_with6.Col < _with6.ColumnCount - 1) {
bDoNotEdit = true;
_with6.Col = _with6.Col + 1;
bDoNotEdit = false;
}
}
break;
case 38:
//Up arrow
_with6.Focus();
System.Windows.Forms.Application.DoEvents();
if (_with6.row > _with6.FixedRows) {
bDoNotEdit = true;
_with6.row = _with6.row - 1;
bDoNotEdit = false;
}
break;
case 40:
//Down arrow
_with6.Focus();
System.Windows.Forms.Application.DoEvents();
if (_with6.row < _with6.RowCount - 1) {
bDoNotEdit = true;
_with6.row = _with6.row + 1;
bDoNotEdit = false;
}
break;
}
}
private bool moveNext(ref int direction)
{
int x = 0;
int y = 0;
x = gridItem.Col + direction;
if (x >= gridItem.ColumnCount) {
gridItem.Col = 1;
if (gridItem.row < gridItem.RowCount - 1) {
y = gridItem.row + 1;
gridItem.TopRow = gridItem.TopRow + 1;
gridItem.row = y;
}
System.Windows.Forms.Application.DoEvents();
} else {
gridItem.Col = gridItem.Col + 1;
}
return true;
}
private void txtEdit_KeyPress(System.Object eventSender, System.Windows.Forms.KeyPressEventArgs eventArgs)
{
short KeyAscii = (short)Strings.Asc(eventArgs.KeyChar);
//
// Delete carriage returns to get rid of beep
// and only allow numbers.
//
int lCurrentX = 0;
switch (KeyAscii) {
case 13: // Strings.Asc(Constants.vbCr)
KeyAscii = 0;
break;
case 8:
case 46:
break;
case 48:
case 49:
case 50:
case 51:
case 52:
case 53:
case 54:
case 55:
case 56:
case 57:
break;
case 45:
//-
if (Strings.InStr(txtEdit.Text, "-")) {
} else {
lCurrentX = txtEdit.SelectionStart + 1;
txtEdit.Text = "-" + txtEdit.Text;
txtEdit.SelectionStart = lCurrentX;
}
KeyAscii = 0;
break;
case 43:
//+
if (Strings.InStr(txtEdit.Text, "-")) {
lCurrentX = txtEdit.SelectionStart - 1;
txtEdit.Text = Strings.Right(txtEdit.Text, Strings.Len(txtEdit.Text) - 1);
if (lCurrentX < 0)
lCurrentX = 0;
txtEdit.SelectionStart = lCurrentX;
}
KeyAscii = 0;
break;
default:
KeyAscii = 0;
break;
}
eventArgs.KeyChar = Strings.Chr(KeyAscii);
if (KeyAscii == 0) {
eventArgs.Handled = true;
}
}
}
}
| |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// Template Source: Templates\CSharp\Requests\EntityCollectionRequest.cs.tt
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The type ListItemsCollectionRequest.
/// </summary>
public partial class ListItemsCollectionRequest : BaseRequest, IListItemsCollectionRequest
{
/// <summary>
/// Constructs a new ListItemsCollectionRequest.
/// </summary>
/// <param name="requestUrl">The URL for the built request.</param>
/// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
/// <param name="options">Query and header option name value pairs for the request.</param>
public ListItemsCollectionRequest(
string requestUrl,
IBaseClient client,
IEnumerable<Option> options)
: base(requestUrl, client, options)
{
}
/// <summary>
/// Adds the specified ListItem to the collection via POST.
/// </summary>
/// <param name="listItem">The ListItem to add.</param>
/// <returns>The created ListItem.</returns>
public System.Threading.Tasks.Task<ListItem> AddAsync(ListItem listItem)
{
return this.AddAsync(listItem, CancellationToken.None);
}
/// <summary>
/// Adds the specified ListItem to the collection via POST.
/// </summary>
/// <param name="listItem">The ListItem to add.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The created ListItem.</returns>
public System.Threading.Tasks.Task<ListItem> AddAsync(ListItem listItem, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "POST";
return this.SendAsync<ListItem>(listItem, cancellationToken);
}
/// <summary>
/// Gets the collection page.
/// </summary>
/// <returns>The collection page.</returns>
public System.Threading.Tasks.Task<IListItemsCollectionPage> GetAsync()
{
return this.GetAsync(CancellationToken.None);
}
/// <summary>
/// Gets the collection page.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The collection page.</returns>
public async System.Threading.Tasks.Task<IListItemsCollectionPage> GetAsync(CancellationToken cancellationToken)
{
this.Method = "GET";
var response = await this.SendAsync<ListItemsCollectionResponse>(null, cancellationToken).ConfigureAwait(false);
if (response != null && response.Value != null && response.Value.CurrentPage != null)
{
if (response.AdditionalData != null)
{
object nextPageLink;
response.AdditionalData.TryGetValue("@odata.nextLink", out nextPageLink);
var nextPageLinkString = nextPageLink as string;
if (!string.IsNullOrEmpty(nextPageLinkString))
{
response.Value.InitializeNextPageRequest(
this.Client,
nextPageLinkString);
}
// Copy the additional data collection to the page itself so that information is not lost
response.Value.AdditionalData = response.AdditionalData;
}
return response.Value;
}
return null;
}
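// Illustrative usage sketch (added; not part of the generated code): one way a caller can walk
// every page returned by GetAsync, assuming the usual generated page shape in which
// IListItemsCollectionPage exposes CurrentPage and a NextPageRequest built from @odata.nextLink.
private static async System.Threading.Tasks.Task ProcessAllItemsAsync(IListItemsCollectionRequest request)
{
var page = await request.GetAsync();
while (page != null)
{
foreach (var listItem in page.CurrentPage)
{
// handle the current ListItem here
}
// Follow the next-page link captured by InitializeNextPageRequest above, if any.
page = page.NextPageRequest == null ? null : await page.NextPageRequest.GetAsync();
}
}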
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="value">The expand value.</param>
/// <returns>The request object to send.</returns>
public IListItemsCollectionRequest Expand(string value)
{
this.QueryOptions.Add(new QueryOption("$expand", value));
return this;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="expandExpression">The expression from which to calculate the expand value.</param>
/// <returns>The request object to send.</returns>
public IListItemsCollectionRequest Expand(Expression<Func<ListItem, object>> expandExpression)
{
if (expandExpression == null)
{
throw new ArgumentNullException(nameof(expandExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(expandExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$expand", value));
}
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="value">The select value.</param>
/// <returns>The request object to send.</returns>
public IListItemsCollectionRequest Select(string value)
{
this.QueryOptions.Add(new QueryOption("$select", value));
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="selectExpression">The expression from which to calculate the select value.</param>
/// <returns>The request object to send.</returns>
public IListItemsCollectionRequest Select(Expression<Func<ListItem, object>> selectExpression)
{
if (selectExpression == null)
{
throw new ArgumentNullException(nameof(selectExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(selectExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$select", value));
}
return this;
}
/// <summary>
/// Adds the specified top value to the request.
/// </summary>
/// <param name="value">The top value.</param>
/// <returns>The request object to send.</returns>
public IListItemsCollectionRequest Top(int value)
{
this.QueryOptions.Add(new QueryOption("$top", value.ToString()));
return this;
}
/// <summary>
/// Adds the specified filter value to the request.
/// </summary>
/// <param name="value">The filter value.</param>
/// <returns>The request object to send.</returns>
public IListItemsCollectionRequest Filter(string value)
{
this.QueryOptions.Add(new QueryOption("$filter", value));
return this;
}
/// <summary>
/// Adds the specified skip value to the request.
/// </summary>
/// <param name="value">The skip value.</param>
/// <returns>The request object to send.</returns>
public IListItemsCollectionRequest Skip(int value)
{
this.QueryOptions.Add(new QueryOption("$skip", value.ToString()));
return this;
}
/// <summary>
/// Adds the specified orderby value to the request.
/// </summary>
/// <param name="value">The orderby value.</param>
/// <returns>The request object to send.</returns>
public IListItemsCollectionRequest OrderBy(string value)
{
this.QueryOptions.Add(new QueryOption("$orderby", value));
return this;
}
}
}
| |
#region S# License
/******************************************************************************************
NOTICE!!! This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.
Project: StockSharp.Rithmic.Rithmic
File: RithmicUtils.cs
Created: 2015, 12, 2, 8:18 PM
Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace StockSharp.Rithmic
{
using System;
using System.Text;
using com.omnesys.rapi;
using Ecng.Common;
using StockSharp.Algo;
using StockSharp.Logging;
using StockSharp.Messages;
using StockSharp.Localization;
internal static class RithmicUtils
{
public static Action<T> WithError<T>(this Action<T> handler, Action<Exception> errorHandler)
{
if (errorHandler == null)
throw new ArgumentNullException(nameof(errorHandler));
return v =>
{
try
{
handler.SafeInvoke(v);
}
catch (Exception ex)
{
errorHandler(ex);
}
};
}
public static Action<T> WithDump<T>(this Action<T> handler, ILogReceiver receiver)
{
if (receiver == null)
throw new ArgumentNullException(nameof(receiver));
return v =>
{
receiver.AddLog(LogLevels.Debug, () => v.DumpableToString());
handler.SafeInvoke(v);
};
}
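// Added illustrative helper (not part of the original adapter code): the two decorators above are
// meant to be composed around SDK callbacks -- dump the incoming value first, then route any
// exception thrown by the handler to the error callback. This wrapper only fixes that ordering.
public static Action<T> WithDumpAndError<T>(this Action<T> handler, ILogReceiver receiver, Action<Exception> errorHandler)
{
return handler.WithError(errorHandler).WithDump(receiver);
}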
public static string DumpableToString(this object dumpable)
{
if (dumpable == null)
throw new ArgumentNullException(nameof(dumpable));
dynamic d = dumpable;
var sb = new StringBuilder();
d.Dump(sb);
return sb.ToString();
}
public static DateTimeOffset ToTime(int seconds, int microseconds = 0)
{
return TimeHelper.GregorianStart
.AddSeconds(seconds)
.AddTicks(microseconds * TimeHelper.TicksPerMicrosecond)
.ApplyTimeZone(TimeZoneInfo.Utc);
}
public static DateTimeOffset? ToDateTime(string date, string time)
{
if (date.IsEmpty())
return null;
var dateTime = date.ToDateTimeOffset("yyyyMMdd");
if (!time.IsEmpty())
dateTime = ToTime(time.To<int>());
return dateTime;
}
public static SecurityTypes? ToSecurityType(string instrumentType)
{
if (instrumentType.IsEmpty())
return null;
switch (instrumentType.ToLowerInvariant())
{
case "future":
return SecurityTypes.Future;
case "future option":
return SecurityTypes.Option;
case "spread":
return SecurityTypes.Index;
default:
throw new ArgumentOutOfRangeException(nameof(instrumentType), LocalizedStrings.Str2140Params.Put(instrumentType));
}
}
public static OptionTypes? ToOptionType(string putCallIndicator)
{
if (putCallIndicator.IsEmpty())
return null;
switch (putCallIndicator.ToLowerInvariant())
{
case "call":
return OptionTypes.Call;
case "put":
return OptionTypes.Put;
default:
throw new ArgumentOutOfRangeException(nameof(putCallIndicator), LocalizedStrings.Str1606Params.Put(putCallIndicator));
}
}
public static Sides? ToOriginSide(string aggressorSide)
{
switch (aggressorSide)
{
case "B":
return Sides.Buy;
case "S":
return Sides.Sell;
default:
return null;
}
}
public static string ToRithmic(this Sides side)
{
return side == Sides.Buy
? Constants.BUY_SELL_TYPE_BUY
: Constants.BUY_SELL_TYPE_SELL;
}
public static string ToRithmic(this TimeInForce? tif, DateTimeOffset? expiryDate)
{
switch (tif)
{
case TimeInForce.PutInQueue:
case null:
{
if (expiryDate == null)
return Constants.ORDER_DURATION_GTC;
else// if (expiryDate == DateTime.Today)
return Constants.ORDER_DURATION_DAY;
}
case TimeInForce.MatchOrCancel:
return Constants.ORDER_DURATION_FOK;
case TimeInForce.CancelBalance:
return Constants.ORDER_DURATION_IOC;
default:
throw new ArgumentOutOfRangeException(nameof(tif));
}
}
public static OrderTypes ToOrderType(string orderType)
{
if (orderType == Constants.ORDER_TYPE_LIMIT)
return OrderTypes.Limit;
else if (orderType == Constants.ORDER_TYPE_LMT_IF_TOUCHED)
return OrderTypes.Limit;
else if (orderType == Constants.ORDER_TYPE_MARKET)
return OrderTypes.Market;
else if (orderType == Constants.ORDER_TYPE_MKT_IF_TOUCHED)
return OrderTypes.Market;
else if (orderType == Constants.ORDER_TYPE_STOP_LIMIT)
return OrderTypes.Conditional;
else if (orderType == Constants.ORDER_TYPE_STOP_MARKET)
return OrderTypes.Conditional;
else if (orderType == Constants.ORDER_TYPE_EXTERNAL)
return OrderTypes.Execute;
else
throw new ArgumentOutOfRangeException(nameof(orderType), orderType, LocalizedStrings.Str3499);
}
public static Sides ToSide(string buySellType)
{
if (buySellType == Constants.BUY_SELL_TYPE_BUY)
return Sides.Buy;
else if (buySellType == Constants.BUY_SELL_TYPE_SELL)
return Sides.Sell;
else if (buySellType == Constants.BUY_SELL_TYPE_SELL_SHORT)
return Sides.Sell;
else if (buySellType == Constants.BUY_SELL_TYPE_SELL_SHORT_EXEMPT)
return Sides.Sell;
else
throw new ArgumentOutOfRangeException(nameof(buySellType), buySellType, LocalizedStrings.Str3500);
}
public static TimeInForce ToTif(string orderDuration)
{
if (orderDuration == Constants.ORDER_DURATION_DAY)
return TimeInForce.PutInQueue;
else if (orderDuration == Constants.ORDER_DURATION_FOK)
return TimeInForce.MatchOrCancel;
else if (orderDuration == Constants.ORDER_DURATION_GTC)
return TimeInForce.PutInQueue;
else if (orderDuration == Constants.ORDER_DURATION_IOC)
return TimeInForce.CancelBalance;
else
throw new ArgumentOutOfRangeException(nameof(orderDuration), orderDuration, LocalizedStrings.Str3501);
}
public static int ToSsboe(this DateTimeOffset time)
{
var value = (int)(time.UtcDateTime - TimeHelper.GregorianStart).TotalSeconds;
if (value < 0)
throw new ArgumentOutOfRangeException(nameof(time), time, LocalizedStrings.Str3502);
return value;
}
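// Illustrative sketch, not part of the original adapter code: ToSsboe and ToTime are
// inverse conversions around TimeHelper.GregorianStart, so for a UTC timestamp with
// whole-second precision the round trip should hold:
//
//   var ssboe = new DateTimeOffset(2020, 1, 1, 0, 0, 0, TimeSpan.Zero).ToSsboe();
//   var roundTripped = ToTime(ssboe);   // expected: 2020-01-01T00:00:00+00:00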
public static PortfolioStates? ToPortfolioState(this string state)
{
switch (state)
{
case "active":
return PortfolioStates.Active;
case "inactive":
return PortfolioStates.Blocked;
default:
return null;
}
}
public static BaseChangeMessage<TChange> TryAdd<TChange>(this BaseChangeMessage<TChange> message, TChange change, Ignorable<double> value)
{
switch (value.State)
{
case Ignorable<double>.ValueState.Ignore:
break;
case Ignorable<double>.ValueState.Clear:
message.Add(change, 0m);
break;
case Ignorable<double>.ValueState.Use:
message.TryAdd(change, value.Value.ToDecimal());
break;
default:
throw new ArgumentOutOfRangeException();
}
return message;
}
}
}
| |
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
new GFXStateBlockData( AL_DepthVisualizeState )
{
zDefined = true;
zEnable = false;
zWriteEnable = false;
samplersDefined = true;
samplerStates[0] = SamplerClampPoint; // depth
samplerStates[1] = SamplerClampLinear; // viz color lookup
};
new GFXStateBlockData( AL_DefaultVisualizeState )
{
blendDefined = true;
blendEnable = true;
blendSrc = GFXBlendSrcAlpha;
blendDest = GFXBlendInvSrcAlpha;
zDefined = true;
zEnable = false;
zWriteEnable = false;
samplersDefined = true;
samplerStates[0] = SamplerClampPoint; // #deferred
samplerStates[1] = SamplerClampLinear; // depthviz
};
new ShaderData( AL_DepthVisualizeShader )
{
DXVertexShaderFile = "shaders/common/postFx/postFxV.hlsl";
DXPixelShaderFile = "shaders/common/lighting/advanced/dbgDepthVisualizeP.hlsl";
OGLVertexShaderFile = "shaders/common/postFx/gl/postFxV.glsl";
OGLPixelShaderFile = "shaders/common/lighting/advanced/gl/dbgDepthVisualizeP.glsl";
samplerNames[0] = "deferredTex";
samplerNames[1] = "depthViz";
pixVersion = 2.0;
};
singleton PostEffect( AL_DepthVisualize )
{
shader = AL_DepthVisualizeShader;
stateBlock = AL_DefaultVisualizeState;
texture[0] = "#deferred";
texture[1] = "depthviz";
target = "$backBuffer";
renderPriority = 9999;
};
function AL_DepthVisualize::onEnabled( %this )
{
AL_NormalsVisualize.disable();
AL_LightColorVisualize.disable();
AL_LightSpecularVisualize.disable();
$AL_NormalsVisualizeVar = false;
$AL_LightColorVisualizeVar = false;
$AL_LightSpecularVisualizeVar = false;
return true;
}
new ShaderData( AL_GlowVisualizeShader )
{
DXVertexShaderFile = "shaders/common/postFx/postFxV.hlsl";
DXPixelShaderFile = "shaders/common/lighting/advanced/dbgGlowVisualizeP.hlsl";
OGLVertexShaderFile = "shaders/common/postFx/gl/postFxV.glsl";
OGLPixelShaderFile = "shaders/common/lighting/advanced/gl/dbgGlowVisualizeP.glsl";
samplerNames[0] = "glowBuffer";
pixVersion = 2.0;
};
singleton PostEffect( AL_GlowVisualize )
{
shader = AL_GlowVisualizeShader;
stateBlock = AL_DefaultVisualizeState;
texture[0] = "#glowbuffer";
target = "$backBuffer";
renderPriority = 9999;
};
new ShaderData( AL_NormalsVisualizeShader )
{
DXVertexShaderFile = "shaders/common/postFx/postFxV.hlsl";
DXPixelShaderFile = "shaders/common/lighting/advanced/dbgNormalVisualizeP.hlsl";
OGLVertexShaderFile = "shaders/common/postFx/gl/postFxV.glsl";
OGLPixelShaderFile = "shaders/common/lighting/advanced/gl/dbgNormalVisualizeP.glsl";
samplerNames[0] = "deferredTex";
pixVersion = 2.0;
};
singleton PostEffect( AL_NormalsVisualize )
{
shader = AL_NormalsVisualizeShader;
stateBlock = AL_DefaultVisualizeState;
texture[0] = "#deferred";
target = "$backBuffer";
renderPriority = 9999;
};
function AL_NormalsVisualize::onEnabled( %this )
{
AL_DepthVisualize.disable();
AL_LightColorVisualize.disable();
AL_LightSpecularVisualize.disable();
$AL_DepthVisualizeVar = false;
$AL_LightColorVisualizeVar = false;
$AL_LightSpecularVisualizeVar = false;
return true;
}
new ShaderData( AL_LightColorVisualizeShader )
{
DXVertexShaderFile = "shaders/common/postFx/postFxV.hlsl";
DXPixelShaderFile = "shaders/common/lighting/advanced/dbgLightColorVisualizeP.hlsl";
OGLVertexShaderFile = "shaders/common/postFx/gl/postFxV.glsl";
OGLPixelShaderFile = "shaders/common/lighting/advanced/gl/dbgLightColorVisualizeP.glsl";
samplerNames[0] = "lightDeferredTex";
pixVersion = 2.0;
};
singleton PostEffect( AL_LightColorVisualize )
{
shader = AL_LightColorVisualizeShader;
stateBlock = AL_DefaultVisualizeState;
texture[0] = "#lightinfo";
target = "$backBuffer";
renderPriority = 9999;
};
function AL_LightColorVisualize::onEnabled( %this )
{
AL_NormalsVisualize.disable();
AL_DepthVisualize.disable();
AL_LightSpecularVisualize.disable();
$AL_NormalsVisualizeVar = false;
$AL_DepthVisualizeVar = false;
$AL_LightSpecularVisualizeVar = false;
return true;
}
new ShaderData( AL_LightSpecularVisualizeShader )
{
DXVertexShaderFile = "shaders/common/postFx/postFxV.hlsl";
DXPixelShaderFile = "shaders/common/lighting/advanced/dbgLightSpecularVisualizeP.hlsl";
OGLVertexShaderFile = "shaders/common/postFx/gl/postFxV.glsl";
OGLPixelShaderFile = "shaders/common/lighting/advanced/gl/dbgLightSpecularVisualizeP.glsl";
samplerNames[0] = "lightDeferredTex";
pixVersion = 2.0;
};
singleton PostEffect( AL_LightSpecularVisualize )
{
shader = AL_LightSpecularVisualizeShader;
stateBlock = AL_DefaultVisualizeState;
texture[0] = "#lightinfo";
target = "$backBuffer";
renderPriority = 9999;
};
function AL_LightSpecularVisualize::onEnabled( %this )
{
AL_NormalsVisualize.disable();
AL_DepthVisualize.disable();
AL_LightColorVisualize.disable();
$AL_NormalsVisualizeVar = false;
$AL_DepthVisualizeVar = false;
$AL_LightColorVisualizeVar = false;
return true;
}
/// Toggles the visualization of the AL depth buffer.
function toggleDepthViz( %enable )
{
if ( %enable $= "" )
{
$AL_DepthVisualizeVar = AL_DepthVisualize.isEnabled() ? false : true;
AL_DepthVisualize.toggle();
}
else if ( %enable )
AL_DepthVisualize.enable();
else if ( !%enable )
AL_DepthVisualize.disable();
}
/// Toggles the visualization of the AL glow buffer.
function toggleGlowViz( %enable )
{
if ( %enable $= "" )
{
$AL_GlowVisualizeVar = AL_GlowVisualize.isEnabled() ? false : true;
AL_GlowVisualize.toggle();
}
else if ( %enable )
AL_GlowVisualize.enable();
else if ( !%enable )
AL_GlowVisualize.disable();
}
/// Toggles the visualization of the AL normals buffer.
function toggleNormalsViz( %enable )
{
if ( %enable $= "" )
{
$AL_NormalsVisualizeVar = AL_NormalsVisualize.isEnabled() ? false : true;
AL_NormalsVisualize.toggle();
}
else if ( %enable )
AL_NormalsVisualize.enable();
else if ( !%enable )
AL_NormalsVisualize.disable();
}
/// Toggles the visualization of the AL lighting color buffer.
function toggleLightColorViz( %enable )
{
if ( %enable $= "" )
{
$AL_LightColorVisualizeVar = AL_LightColorVisualize.isEnabled() ? false : true;
AL_LightColorVisualize.toggle();
}
else if ( %enable )
AL_LightColorVisualize.enable();
else if ( !%enable )
AL_LightColorVisualize.disable();
}
/// Toggles the visualization of the AL lighting specular power buffer.
function toggleLightSpecularViz( %enable )
{
if ( %enable $= "" )
{
$AL_LightSpecularVisualizeVar = AL_LightSpecularVisualize.isEnabled() ? false : true;
AL_LightSpecularVisualize.toggle();
}
else if ( %enable )
AL_LightSpecularVisualize.enable();
else if ( !%enable )
AL_LightSpecularVisualize.disable();
}
function toggleBackbufferViz( %enable )
{
if ( %enable $= "" )
{
$AL_BackbufferVisualizeVar = AL_DeferredShading.isEnabled() ? true : false;
AL_DeferredShading.toggle();
}
else if ( %enable )
AL_DeferredShading.disable();
else if ( !%enable )
AL_DeferredShading.enable();
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Reflection;
using System.Runtime;
using System.Text;
using System.Threading.Tasks;
using System.Xml;
namespace Orleans.Runtime.Configuration
{
/// <summary>
/// Utilities class for handling configuration.
/// </summary>
public static class ConfigUtilities
{
internal static void ParseAdditionalAssemblyDirectories(IDictionary<string, SearchOption> directories, XmlElement root)
{
foreach(var node in root.ChildNodes)
{
var grandchild = node as XmlElement;
if(grandchild == null)
{
continue;
}
else
{
if(!grandchild.HasAttribute("Path"))
throw new FormatException("Missing 'Path' attribute on Directory element.");
// default to recursive; the directory is registered even when IncludeSubFolders is absent
var recursive = true;
if(grandchild.HasAttribute("IncludeSubFolders"))
{
if(!bool.TryParse(grandchild.Attributes["IncludeSubFolders"].Value, out recursive))
throw new FormatException("Attribute 'IncludeSubFolders' has invalid value.");
}
directories[grandchild.Attributes["Path"].Value] = recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly;
}
}
}
internal static void ParseTelemetry(XmlElement root)
{
foreach (var node in root.ChildNodes)
{
var grandchild = node as XmlElement;
if (grandchild == null) continue;
if (!grandchild.LocalName.Equals("TelemetryConsumer"))
{
continue;
}
else
{
if (!grandchild.HasAttribute("Type"))
throw new FormatException("Missing 'Type' attribute on TelemetryConsumer element.");
if (!grandchild.HasAttribute("Assembly"))
throw new FormatException("Missing 'Type' attribute on TelemetryConsumer element.");
var className = grandchild.Attributes["Type"].Value;
var assemblyName = new AssemblyName(grandchild.Attributes["Assembly"].Value);
Assembly assembly = null;
try
{
assembly = Assembly.Load(assemblyName);
var pluginType = assembly.GetType(className);
if (pluginType == null) throw new TypeLoadException("Cannot locate plugin class " + className + " in assembly " + assembly.FullName);
var args = grandchild.Attributes.Cast<XmlAttribute>().Where(a => a.LocalName != "Type" && a.LocalName != "Assembly").ToArray();
var plugin = Activator.CreateInstance(pluginType, args);
if (plugin is ITelemetryConsumer)
{
LogManager.TelemetryConsumers.Add(plugin as ITelemetryConsumer);
}
else
{
throw new InvalidCastException("TelemetryConsumer class " + className + " must implement one of Orleans.Runtime.ITelemetryConsumer based interfaces");
}
}
catch (Exception exc)
{
throw new TypeLoadException("Cannot load TelemetryConsumer class " + className + " from assembly " + assembly + " - Error=" + exc);
}
}
}
}
internal static void ParseTracing(ITraceConfiguration config, XmlElement root, string nodeName)
{
if (root.HasAttribute("DefaultTraceLevel"))
{
config.DefaultTraceLevel = ParseSeverity(root.GetAttribute("DefaultTraceLevel"),
"Invalid trace level DefaultTraceLevel attribute value on Tracing element for " + nodeName);
}
if (root.HasAttribute("TraceToConsole"))
{
config.TraceToConsole = ParseBool(root.GetAttribute("TraceToConsole"),
"Invalid boolean value for TraceToConsole attribute on Tracing element for " + nodeName);
}
if (root.HasAttribute("TraceToFile"))
{
config.TraceFilePattern = root.GetAttribute("TraceToFile");
}
if (root.HasAttribute("LargeMessageWarningThreshold"))
{
config.LargeMessageWarningThreshold = ParseInt(root.GetAttribute("LargeMessageWarningThreshold"),
"Invalid boolean value for LargeMessageWarningThresholdattribute on Tracing element for " + nodeName);
}
if (root.HasAttribute("PropagateActivityId"))
{
config.PropagateActivityId = ParseBool(root.GetAttribute("PropagateActivityId"),
"Invalid boolean value for PropagateActivityId attribute on Tracing element for " + nodeName);
}
if (root.HasAttribute("BulkMessageLimit"))
{
config.BulkMessageLimit = ParseInt(root.GetAttribute("BulkMessageLimit"),
"Invalid int value for BulkMessageLimit attribute on Tracing element for " + nodeName);
}
foreach (XmlNode node in root.ChildNodes)
{
var grandchild = node as XmlElement;
if (grandchild == null) continue;
if (grandchild.LocalName.Equals("TraceLevelOverride") && grandchild.HasAttribute("TraceLevel") && grandchild.HasAttribute("LogPrefix"))
{
config.TraceLevelOverrides.Add(new Tuple<string, Severity>(grandchild.GetAttribute("LogPrefix"),
ParseSeverity(grandchild.GetAttribute("TraceLevel"),
"Invalid trace level TraceLevel attribute value on TraceLevelOverride element for " + nodeName + " prefix " +
grandchild.GetAttribute("LogPrefix"))));
}
else if (grandchild.LocalName.Equals("LogConsumer"))
{
var className = grandchild.InnerText;
Assembly assembly = null;
try
{
int pos = className.IndexOf(',');
if (pos > 0)
{
var assemblyName = className.Substring(pos + 1).Trim();
className = className.Substring(0, pos).Trim();
assembly = Assembly.Load(new AssemblyName(assemblyName));
}
else
{
assembly = typeof(ConfigUtilities).GetTypeInfo().Assembly;
}
var pluginType = assembly.GetType(className);
if (pluginType == null) throw new TypeLoadException("Cannot locate plugin class " + className + " in assembly " + assembly.FullName);
var plugin = Activator.CreateInstance(pluginType);
if (plugin is ILogConsumer)
{
LogManager.LogConsumers.Add(plugin as ILogConsumer);
}
else
{
throw new InvalidCastException("LogConsumer class " + className + " must implement Orleans.ILogConsumer interface");
}
}
catch (Exception exc)
{
throw new TypeLoadException("Cannot load LogConsumer class " + className + " from assembly " + assembly + " - Error=" + exc);
}
}
}
SetTraceFileName(config, nodeName, DateTime.UtcNow);
}
internal static void ParseStatistics(IStatisticsConfiguration config, XmlElement root, string nodeName)
{
if (root.HasAttribute("ProviderType"))
{
config.StatisticsProviderName = root.GetAttribute("ProviderType");
}
if (root.HasAttribute("MetricsTableWriteInterval"))
{
config.StatisticsMetricsTableWriteInterval = ParseTimeSpan(root.GetAttribute("MetricsTableWriteInterval"),
"Invalid TimeSpan value for Statistics.MetricsTableWriteInterval attribute on Statistics element for " + nodeName);
}
if (root.HasAttribute("PerfCounterWriteInterval"))
{
config.StatisticsPerfCountersWriteInterval = ParseTimeSpan(root.GetAttribute("PerfCounterWriteInterval"),
"Invalid TimeSpan value for Statistics.PerfCounterWriteInterval attribute on Statistics element for " + nodeName);
}
if (root.HasAttribute("LogWriteInterval"))
{
config.StatisticsLogWriteInterval = ParseTimeSpan(root.GetAttribute("LogWriteInterval"),
"Invalid TimeSpan value for Statistics.LogWriteInterval attribute on Statistics element for " + nodeName);
}
if (root.HasAttribute("WriteLogStatisticsToTable"))
{
config.StatisticsWriteLogStatisticsToTable = ParseBool(root.GetAttribute("WriteLogStatisticsToTable"),
"Invalid bool value for Statistics.WriteLogStatisticsToTable attribute on Statistics element for " + nodeName);
}
if (root.HasAttribute("StatisticsCollectionLevel"))
{
config.StatisticsCollectionLevel = ConfigUtilities.ParseEnum<StatisticsLevel>(root.GetAttribute("StatisticsCollectionLevel"),
"Invalid value of for Statistics.StatisticsCollectionLevel attribute on Statistics element for " + nodeName);
}
}
internal static void ParseLimitValues(LimitManager limitManager, XmlElement root, string nodeName)
{
foreach (XmlNode node in root.ChildNodes)
{
var grandchild = node as XmlElement;
if (grandchild == null) continue;
if (grandchild.LocalName.Equals("Limit") && grandchild.HasAttribute("Name")
&& (grandchild.HasAttribute("SoftLimit") || grandchild.HasAttribute("HardLimit")))
{
var limitName = grandchild.GetAttribute("Name");
limitManager.AddLimitValue(limitName, new LimitValue
{
Name = limitName,
SoftLimitThreshold = ParseInt(grandchild.GetAttribute("SoftLimit"),
"Invalid integer value for the SoftLimit attribute on the Limit element"),
HardLimitThreshold = grandchild.HasAttribute("HardLimit") ? ParseInt(grandchild.GetAttribute("HardLimit"),
"Invalid integer value for the HardLimit attribute on the Limit element") : 0,
});
}
}
}
internal static void SetTraceFileName(ITraceConfiguration config, string nodeName, DateTime timestamp)
{
const string dateFormat = "yyyy-MM-dd-HH.mm.ss.fffZ";
if (config == null) throw new ArgumentNullException("config");
if (config.TraceFilePattern == null
|| string.IsNullOrWhiteSpace(config.TraceFilePattern)
|| config.TraceFilePattern.Equals("false", StringComparison.OrdinalIgnoreCase)
|| config.TraceFilePattern.Equals("none", StringComparison.OrdinalIgnoreCase))
{
config.TraceFileName = null;
}
else if (string.Empty.Equals(config.TraceFileName))
{
config.TraceFileName = null; // normalize
}
else
{
string traceFileDir = Path.GetDirectoryName(config.TraceFilePattern);
if (!String.IsNullOrEmpty(traceFileDir) && !Directory.Exists(traceFileDir))
{
string traceFileName = Path.GetFileName(config.TraceFilePattern);
string[] alternateDirLocations = { "appdir", "." };
foreach (var d in alternateDirLocations)
{
if (Directory.Exists(d))
{
config.TraceFilePattern = Path.Combine(d, traceFileName);
break;
}
}
}
config.TraceFileName = String.Format(config.TraceFilePattern, nodeName, timestamp.ToUniversalTime().ToString(dateFormat), Dns.GetHostName());
}
}
internal static int ParseInt(string input, string errorMessage)
{
int p;
if (!Int32.TryParse(input, out p))
{
throw new FormatException(errorMessage);
}
return p;
}
internal static long ParseLong(string input, string errorMessage)
{
long p;
if (!Int64.TryParse(input, out p))
{
throw new FormatException(errorMessage + ". Tried to parse " + input);
}
return p;
}
internal static bool ParseBool(string input, string errorMessage)
{
bool p;
if (Boolean.TryParse(input, out p)) return p;
switch (input)
{
case "0":
p = false;
break;
case "1":
p = true;
break;
default:
throw new FormatException(errorMessage + ". Tried to parse " + input);
}
return p;
}
internal static double ParseDouble(string input, string errorMessage)
{
double p;
if (!Double.TryParse(input, out p))
{
throw new FormatException(errorMessage + ". Tried to parse " + input);
}
return p;
}
internal static Guid ParseGuid(string input, string errorMessage)
{
Guid p;
if (!Guid.TryParse(input, out p))
{
throw new FormatException(errorMessage);
}
return p;
}
internal static Type ParseFullyQualifiedType(string input, string errorMessage)
{
Type returnValue;
try
{
returnValue = Type.GetType(input);
}
catch(Exception e)
{
throw new FormatException(errorMessage, e);
}
if (returnValue == null)
{
throw new FormatException(errorMessage);
}
return returnValue;
}
internal static void ValidateSerializationProvider(TypeInfo type)
{
if (type.IsClass == false)
{
throw new FormatException(string.Format("The serialization provider type {0} was not a class", type.FullName));
}
if (type.IsAbstract)
{
throw new FormatException(string.Format("The serialization provider type {0} was an abstract class", type.FullName));
}
if (type.IsPublic == false)
{
throw new FormatException(string.Format("The serialization provider type {0} is not public", type.FullName));
}
if (type.IsGenericType && type.IsConstructedGenericType() == false)
{
throw new FormatException(string.Format("The serialization provider type {0} is generic and has a missing type parameter specification", type.FullName));
}
var constructor = type.GetConstructor(Type.EmptyTypes);
if (constructor == null)
{
throw new FormatException(string.Format("The serialization provider type {0} does not have a parameterless constructor", type.FullName));
}
if (constructor.IsPublic == false)
{
throw new FormatException(string.Format("The serialization provider type {0} has a non-public parameterless constructor", type.FullName));
}
}
// Time spans are entered as a string of decimal digits, optionally followed by a unit string: "ms", "s", "m", "hr"
internal static TimeSpan ParseTimeSpan(string input, string errorMessage)
{
int unitSize;
string numberInput;
var trimmedInput = input.Trim().ToLowerInvariant();
if (trimmedInput.EndsWith("ms", StringComparison.Ordinal))
{
unitSize = 1;
numberInput = trimmedInput.Remove(trimmedInput.Length - 2).Trim();
}
else if (trimmedInput.EndsWith("s", StringComparison.Ordinal))
{
unitSize = 1000;
numberInput = trimmedInput.Remove(trimmedInput.Length - 1).Trim();
}
else if (trimmedInput.EndsWith("m", StringComparison.Ordinal))
{
unitSize = 60 * 1000;
numberInput = trimmedInput.Remove(trimmedInput.Length - 1).Trim();
}
else if (trimmedInput.EndsWith("hr", StringComparison.Ordinal))
{
unitSize = 60 * 60 * 1000;
numberInput = trimmedInput.Remove(trimmedInput.Length - 2).Trim();
}
else
{
unitSize = 1000; // Default is seconds
numberInput = trimmedInput;
}
double rawTimeSpan;
if (!double.TryParse(numberInput, out rawTimeSpan))
{
throw new FormatException(errorMessage + ". Tried to parse " + input);
}
return TimeSpan.FromMilliseconds(rawTimeSpan * unitSize);
}
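// Illustrative examples only (not in the original source), following the unit handling above:
//   ParseTimeSpan("500ms", "...") -> TimeSpan.FromMilliseconds(500)
//   ParseTimeSpan("30s", "...")   -> TimeSpan.FromSeconds(30)
//   ParseTimeSpan("5m", "...")    -> TimeSpan.FromMinutes(5)
//   ParseTimeSpan("2hr", "...")   -> TimeSpan.FromHours(2)
//   ParseTimeSpan("45", "...")    -> TimeSpan.FromSeconds(45)  // bare numbers default to seconds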
internal static string ToParseableTimeSpan(TimeSpan input)
{
return $"{input.TotalMilliseconds.ToString(CultureInfo.InvariantCulture)}ms";
}
internal static byte[] ParseSubnet(string input, string errorMessage)
{
return string.IsNullOrEmpty(input) ? null : input.Split('.').Select(s => (byte) ParseInt(s, errorMessage)).ToArray();
}
internal static T ParseEnum<T>(string input, string errorMessage)
where T : struct // really, where T : enum, but there's no way to require that in C#
{
T s;
if (!Enum.TryParse<T>(input, out s))
{
throw new FormatException(errorMessage + ". Tried to parse " + input);
}
return s;
}
internal static Severity ParseSeverity(string input, string errorMessage)
{
Severity s;
if (!Enum.TryParse<Severity>(input, out s))
{
throw new FormatException(errorMessage + ". Tried to parse " + input);
}
return s;
}
internal static async Task<IPEndPoint> ParseIPEndPoint(XmlElement root, byte[] subnet = null)
{
if (!root.HasAttribute("Address")) throw new FormatException("Missing Address attribute for " + root.LocalName + " element");
if (!root.HasAttribute("Port")) throw new FormatException("Missing Port attribute for " + root.LocalName + " element");
var family = AddressFamily.InterNetwork;
if (root.HasAttribute("Subnet"))
{
subnet = ParseSubnet(root.GetAttribute("Subnet"), "Invalid subnet");
}
if (root.HasAttribute("PreferredFamily"))
{
family = ParseEnum<AddressFamily>(root.GetAttribute("PreferredFamily"),
"Invalid preferred addressing family for " + root.LocalName + " element");
}
IPAddress addr = await ClusterConfiguration.ResolveIPAddress(root.GetAttribute("Address"), subnet, family);
int port = ParseInt(root.GetAttribute("Port"), "Invalid Port attribute for " + root.LocalName + " element");
return new IPEndPoint(addr, port);
}
internal static string TraceConfigurationToString(ITraceConfiguration config)
{
var sb = new StringBuilder();
sb.Append(" Tracing: ").AppendLine();
sb.Append(" Default Trace Level: ").Append(config.DefaultTraceLevel).AppendLine();
if (config.TraceLevelOverrides.Count > 0)
{
sb.Append(" TraceLevelOverrides:").AppendLine();
foreach (var over in config.TraceLevelOverrides)
{
sb.Append(" ").Append(over.Item1).Append(" ==> ").Append(over.Item2.ToString()).AppendLine();
}
}
else
{
sb.Append(" TraceLevelOverrides: None").AppendLine();
}
sb.Append(" Trace to Console: ").Append(config.TraceToConsole).AppendLine();
sb.Append(" Trace File Name: ").Append(string.IsNullOrWhiteSpace(config.TraceFileName) ? "" : Path.GetFullPath(config.TraceFileName)).AppendLine();
sb.Append(" LargeMessageWarningThreshold: ").Append(config.LargeMessageWarningThreshold).AppendLine();
sb.Append(" PropagateActivityId: ").Append(config.PropagateActivityId).AppendLine();
sb.Append(" BulkMessageLimit: ").Append(config.BulkMessageLimit).AppendLine();
return sb.ToString();
}
internal static string IStatisticsConfigurationToString(IStatisticsConfiguration config)
{
var sb = new StringBuilder();
sb.Append(" Statistics: ").AppendLine();
sb.Append(" MetricsTableWriteInterval: ").Append(config.StatisticsMetricsTableWriteInterval).AppendLine();
sb.Append(" PerfCounterWriteInterval: ").Append(config.StatisticsPerfCountersWriteInterval).AppendLine();
sb.Append(" LogWriteInterval: ").Append(config.StatisticsLogWriteInterval).AppendLine();
sb.Append(" WriteLogStatisticsToTable: ").Append(config.StatisticsWriteLogStatisticsToTable).AppendLine();
sb.Append(" StatisticsCollectionLevel: ").Append(config.StatisticsCollectionLevel).AppendLine();
#if TRACK_DETAILED_STATS
sb.Append(" TRACK_DETAILED_STATS: true").AppendLine();
#endif
if (!string.IsNullOrEmpty(config.StatisticsProviderName))
sb.Append(" StatisticsProviderName:").Append(config.StatisticsProviderName).AppendLine();
return sb.ToString();
}
/// <summary>
/// Prints the DataConnectionString,
/// without disclosing any credential info
/// such as the Azure Storage AccountKey or SqlServer password.
/// </summary>
/// <param name="dataConnectionString">The connection string to print.</param>
/// <returns>The string representation of the DataConnectionString with account credential info redacted.</returns>
public static string RedactConnectionStringInfo(string dataConnectionString)
{
return PrintSqlConnectionString(
PrintDataConnectionInfo(dataConnectionString));
}
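// Illustrative examples (not in the original source) of the redaction performed above:
//   RedactConnectionStringInfo("DefaultEndpointsProtocol=https;AccountName=foo;AccountKey=abc123")
//     -> "DefaultEndpointsProtocol=https;AccountName=foo;AccountKey=<--SNIP-->"
//   RedactConnectionStringInfo("Server=db;Database=orleans;User Id=sa;Password=secret")
//     -> "Server=db;Database=orleans;User Id=sa;Password=<--SNIP-->"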
public static string PrintDataConnectionInfo(string azureConnectionString)
{
if (String.IsNullOrEmpty(azureConnectionString)) return "null";
string azureConnectionInfo = azureConnectionString;
// Remove any Azure account keys from connection string info written to log files
int accountKeyPos = azureConnectionInfo.LastIndexOf("AccountKey=", StringComparison.Ordinal);
if (accountKeyPos > 0)
{
azureConnectionInfo = azureConnectionInfo.Remove(accountKeyPos) + "AccountKey=<--SNIP-->";
}
return azureConnectionInfo;
}
public static string PrintSqlConnectionString(string sqlConnectionString)
{
if (String.IsNullOrEmpty(sqlConnectionString))
{
return "null";
}
var sqlConnectionInfo = sqlConnectionString;
// Remove any Azure account keys from connection string info written to log files
int keyPos = sqlConnectionInfo.LastIndexOf("Password=", StringComparison.OrdinalIgnoreCase);
if (keyPos > 0)
{
sqlConnectionInfo = sqlConnectionInfo.Remove(keyPos) + "Password=<--SNIP-->";
}
return sqlConnectionInfo;
}
public static TimeSpan ParseCollectionAgeLimit(XmlElement xmlElement)
{
if (xmlElement.LocalName != "Deactivation")
throw new ArgumentException("The XML element must be a <Deactivate/> element.");
if (!xmlElement.HasAttribute("AgeLimit"))
throw new ArgumentException("The AgeLimit attribute is required for a <Deactivate/> element.");
return ParseTimeSpan(xmlElement.GetAttribute("AgeLimit"), "Invalid TimeSpan value for Deactivation.AgeLimit");
}
private static readonly string[] defaultClientConfigFileNames = { "ClientConfiguration.xml", "OrleansClientConfiguration.xml", "Client.config", "Client.xml" };
private static readonly string[] defaultSiloConfigFileNames = { "OrleansConfiguration.xml", "orleans.config", "config.xml", "orleans.config.xml" };
private static readonly string[] defaultConfigDirs =
{
null, // Will be filled in with directory location for this executing assembly
"approot", // Azure AppRoot directory
".", // Current directory
".." // Parent directory
};
public static string FindConfigFile(bool isSilo)
{
// Add directory containing Orleans binaries to the search locations for config files
defaultConfigDirs[0] = Path.GetDirectoryName(typeof(ConfigUtilities).GetTypeInfo().Assembly.Location);
var notFound = new List<string>();
foreach (string dir in defaultConfigDirs)
{
foreach (string file in isSilo ? defaultSiloConfigFileNames : defaultClientConfigFileNames)
{
var fileName = Path.GetFullPath(Path.Combine(dir, file));
if (File.Exists(fileName)) return fileName;
notFound.Add(fileName);
}
}
var whereWeLooked = new StringBuilder();
whereWeLooked.AppendFormat("Cannot locate Orleans {0} config file.", isSilo ? "silo" : "client").AppendLine();
whereWeLooked.AppendLine("Searched locations:");
foreach (var i in notFound)
{
whereWeLooked.AppendFormat("\t- {0}", i).AppendLine();
}
throw new FileNotFoundException(whereWeLooked.ToString());
}
/// <summary>
/// Returns the Runtime Version information.
/// </summary>
/// <returns>the Runtime Version information</returns>
public static string RuntimeVersionInfo()
{
var sb = new StringBuilder();
#if !NETSTANDARD
// TODO: could use Microsoft.Extensions.PlatformAbstractions package to get this info
sb.Append(" .NET version: ").AppendLine(Environment.Version.ToString());
sb.Append(" Is .NET 4.5=").AppendLine(IsNet45OrNewer().ToString());
sb.Append(" OS version: ").AppendLine(Environment.OSVersion.ToString());
#endif
sb.AppendFormat(" GC Type={0} GCLatencyMode={1}",
GCSettings.IsServerGC ? "Server" : "Client",
Enum.GetName(typeof(GCLatencyMode), GCSettings.LatencyMode))
.AppendLine();
return sb.ToString();
}
internal static bool IsNet45OrNewer()
{
// From: http://stackoverflow.com/questions/8517159/how-to-detect-at-runtime-that-net-version-4-5-currently-running-your-code
// Class "ReflectionContext" exists from .NET 4.5 onwards.
return Type.GetType("System.Reflection.ReflectionContext", false) != null;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
namespace PresentationBus
{
public class PresentationBus : IPresentationBus, IPresentationBusConfiguration
{
private readonly Dictionary<Type, EventSubscribers> _subscribersByEventType;
private readonly Dictionary<Type, RequestSubscribers> _subscribersByRequestType;
private readonly Dictionary<Type, CommandSubscribers> _subscribersByCommandType;
public PresentationBus()
{
_subscribersByEventType = new Dictionary<Type, EventSubscribers>();
_subscribersByRequestType = new Dictionary<Type, RequestSubscribers>();
_subscribersByCommandType = new Dictionary<Type, CommandSubscribers>();
}
public void Subscribe(IHandlePresentationMessages instance)
{
var handlesEvents = instance as IHandlePresentationEvents;
if (handlesEvents != null)
DoSubscribe(handlesEvents);
var handlesCommands = instance as IHandlePresentationCommands;
if (handlesCommands != null)
DoSubscribe(handlesCommands);
var handlesRequests = instance as IHandlePresentationRequests;
if (handlesRequests != null)
DoSubscribe(handlesRequests);
}
public void UnSubscribe(IHandlePresentationMessages instance)
{
var handlesEvents = instance as IHandlePresentationEvents;
if (handlesEvents != null)
DoUnSubscribe(handlesEvents);
var handlesCommands = instance as IHandlePresentationCommands;
if (handlesCommands != null)
DoUnSubscribe(handlesCommands);
var handlesRequests = instance as IHandlePresentationRequests;
if (handlesRequests != null)
DoUnSubscribe(handlesRequests);
}
private void DoSubscribe(IHandlePresentationEvents instance)
{
ForEachHandledEvent(instance, x => SubscribeForEvents(x, instance));
}
private void SubscribeForEvents(Type eventType, object instance)
{
EventSubscribers eventSubscribersForEventType;
if (_subscribersByEventType.ContainsKey(eventType))
{
eventSubscribersForEventType = _subscribersByEventType[eventType];
}
else
{
eventSubscribersForEventType = new EventSubscribers();
_subscribersByEventType.Add(eventType, eventSubscribersForEventType);
}
eventSubscribersForEventType.AddSubscriber(instance);
}
private void DoUnSubscribe(IHandlePresentationEvents instance)
{
ForEachHandledEvent(instance, x => UnSubscribeForEvents(x, instance));
}
private void UnSubscribeForEvents(Type eventType, object handler)
{
if (_subscribersByEventType.ContainsKey(eventType))
{
_subscribersByEventType[eventType].RemoveSubscriber(handler);
}
}
private void DoSubscribe(IHandlePresentationCommands instance)
{
ForEachHandledCommand(instance, x => SubscribeForCommands(x, instance));
}
private void SubscribeForCommands(Type commandType, object instance)
{
CommandSubscribers commandSubscribersForCommandType;
if (_subscribersByCommandType.ContainsKey(commandType))
{
commandSubscribersForCommandType = _subscribersByCommandType[commandType];
}
else
{
commandSubscribersForCommandType = new CommandSubscribers();
_subscribersByCommandType.Add(commandType, commandSubscribersForCommandType);
}
commandSubscribersForCommandType.AddSubscriber(instance);
}
private void DoUnSubscribe(IHandlePresentationCommands instance)
{
ForEachHandledCommand(instance, x => UnSubscribeForCommands(x, instance));
}
private void UnSubscribeForCommands(Type commandType, object handler)
{
if (_subscribersByCommandType.ContainsKey(commandType))
{
_subscribersByCommandType[commandType].RemoveSubscriber(handler);
}
}
private void DoSubscribe(IHandlePresentationRequests instance)
{
ForEachHandledRequest(instance, x => SubscribeForRequests(x, instance));
}
private void SubscribeForRequests(Type requestType, object instance)
{
RequestSubscribers requestSubscribersForRequestType;
if (_subscribersByRequestType.ContainsKey(requestType))
{
requestSubscribersForRequestType = _subscribersByRequestType[requestType];
}
else
{
requestSubscribersForRequestType = new RequestSubscribers();
_subscribersByRequestType.Add(requestType, requestSubscribersForRequestType);
}
requestSubscribersForRequestType.AddSubscriber(instance);
}
private void DoUnSubscribe(IHandlePresentationRequests instance)
{
ForEachHandledRequest(instance, x => UnSubscribeForRequests(x, instance));
}
private void UnSubscribeForRequests(Type requestType, object handler)
{
if (_subscribersByRequestType.ContainsKey(requestType))
{
_subscribersByRequestType[requestType].RemoveSubscriber(handler);
}
}
public async Task PublishAsync<TEvent>(TEvent presentationEvent) where TEvent : IPresentationEvent
{
var type = presentationEvent.GetType();
var typeInfo = type.GetTypeInfo();
foreach (var subscribedType in _subscribersByEventType.Keys.Where(t => t.GetTypeInfo().IsAssignableFrom(typeInfo)).ToArray())
{
await _subscribersByEventType[subscribedType].PublishAsync(presentationEvent).ConfigureAwait(continueOnCapturedContext: false);
}
}
public async Task SendAsync<TCommand>(TCommand presentationEvent) where TCommand : IPresentationCommand
{
var type = presentationEvent.GetType();
var typeInfo = type.GetTypeInfo();
foreach (var subscribedType in _subscribersByCommandType.Keys.Where(t => t.GetTypeInfo().IsAssignableFrom(typeInfo)).ToArray())
{
await _subscribersByCommandType[subscribedType].SendAsync(presentationEvent).ConfigureAwait(continueOnCapturedContext: false);
}
}
public async Task<TResponse> RequestAsync<TRequest, TResponse>(IPresentationRequest<TRequest, TResponse> request)
where TRequest : IPresentationRequest<TRequest, TResponse>
where TResponse : IPresentationResponse
{
var type = request.GetType();
var typeInfo = type.GetTypeInfo();
var result = default(TResponse);
foreach (var subscribedType in _subscribersByRequestType.Keys.Where(t => t.GetTypeInfo().IsAssignableFrom(typeInfo)).ToArray())
{
var results = await _subscribersByRequestType[subscribedType].PublishRequestAsync<TRequest, TResponse>((TRequest)request).ConfigureAwait(continueOnCapturedContext: false);
result = results.FirstOrDefault();
if (result != null)
{
return result;
}
}
return result;
}
public async Task<IEnumerable<TResponse>> MulticastRequestAsync<TRequest, TResponse>(IPresentationRequest<TRequest, TResponse> request)
where TRequest : IPresentationRequest<TRequest, TResponse>
where TResponse : IPresentationResponse
{
var type = request.GetType();
var typeInfo = type.GetTypeInfo();
var results = new List<TResponse>();
foreach (var subscribedType in _subscribersByRequestType.Keys.Where(t => t.GetTypeInfo().IsAssignableFrom(typeInfo)).ToArray())
{
var result = await _subscribersByRequestType[subscribedType].PublishRequestAsync<TRequest,TResponse>((TRequest)request).ConfigureAwait(continueOnCapturedContext: false);
results.AddRange(result);
}
return results;
}
private void ForEachHandledEvent(IHandlePresentationEvents instance, Action<Type> callback)
{
var handlesEventsType = typeof(IHandlePresentationEvents);
var type = instance.GetType();
var interfaceTypes = type.GetTypeInfo().ImplementedInterfaces;
foreach (var interfaceType in interfaceTypes.Where(x => x.IsConstructedGenericType && handlesEventsType.GetTypeInfo().IsAssignableFrom(x.GetTypeInfo())))
{
var eventType = interfaceType.GenericTypeArguments.First();
callback(eventType);
}
}
private void ForEachHandledCommand(IHandlePresentationCommands instance, Action<Type> callback)
{
var handlesCommandsType = typeof(IHandlePresentationCommands);
var type = instance.GetType();
var interfaceTypes = type.GetTypeInfo().ImplementedInterfaces;
foreach (var interfaceType in interfaceTypes.Where(x => x.IsConstructedGenericType && handlesCommandsType.GetTypeInfo().IsAssignableFrom(x.GetTypeInfo())))
{
var commandType = interfaceType.GenericTypeArguments.First();
callback(commandType);
}
}
private void ForEachHandledRequest(IHandlePresentationRequests instance, Action<Type> callback)
{
var handlesRequestsType = typeof(IHandlePresentationRequests);
var type = instance.GetType();
var interfaceTypes = type.GetTypeInfo().ImplementedInterfaces;
foreach (var interfaceType in interfaceTypes.Where(x => x.IsConstructedGenericType && handlesRequestsType.GetTypeInfo().IsAssignableFrom(x.GetTypeInfo())))
{
var eventType = interfaceType.GenericTypeArguments.First();
callback(eventType);
}
}
internal class EventSubscribers
{
private readonly List<WeakReference> _subscribers;
public EventSubscribers()
{
_subscribers = new List<WeakReference>();
}
public void AddSubscriber<T>(IHandlePresentationEvent<T> instance) where T : IPresentationEvent
{
AddSubscriber((object)instance);
}
public void AddSubscriber<T>(IHandlePresentationEventAsync<T> instance) where T : IPresentationEvent
{
AddSubscriber((object)instance);
}
public void AddSubscriber(object instance)
{
if (_subscribers.Any(s => s.Target == instance))
return;
_subscribers.Add(new WeakReference(instance));
}
public void RemoveSubscriber<T>(IHandlePresentationEvent<T> instance) where T : IPresentationEvent
{
RemoveSubscriber((object)instance);
}
public void RemoveSubscriber(object instance)
{
var subscriber = _subscribers.SingleOrDefault(s => s.Target == instance);
if (subscriber != null)
{
_subscribers.Remove(subscriber);
}
}
public async Task<bool> PublishAsync<TEvent>(TEvent presentationEvent) where TEvent : IPresentationEvent
{
var anySubscribersStillListening = false;
foreach (var subscriber in _subscribers.Where(s => s.Target != null))
{
var syncHandler = subscriber.Target as IHandlePresentationEvent<TEvent>;
if (syncHandler != null)
syncHandler.Handle(presentationEvent);
var asyncHandler = subscriber.Target as IHandlePresentationEventAsync<TEvent>;
if (asyncHandler != null)
await asyncHandler.HandleAsync(presentationEvent).ConfigureAwait(continueOnCapturedContext: false);
anySubscribersStillListening = true;
}
return anySubscribersStillListening;
}
}
internal class RequestSubscribers
{
private readonly List<WeakReference> _subscribers;
public RequestSubscribers()
{
_subscribers = new List<WeakReference>();
}
public void AddSubscriber<TRequest, TResponse>(IHandlePresentationRequest<TRequest, TResponse> instance)
where TRequest : IPresentationRequest<TRequest, TResponse>
where TResponse : IPresentationResponse
{
AddSubscriber((object)instance);
}
public void AddSubscriber<TRequest, TResponse>(IHandlePresentationRequestAsync<TRequest, TResponse> instance)
where TRequest : IPresentationRequest<TRequest, TResponse>
where TResponse : IPresentationResponse
{
AddSubscriber((object)instance);
}
public void AddSubscriber(object instance)
{
if (_subscribers.Any(s => s.Target == instance))
return;
_subscribers.Add(new WeakReference(instance));
}
public void RemoveSubscriber<TRequest, TResponse>(IHandlePresentationRequest<TRequest, TResponse> instance)
where TRequest : IPresentationRequest<TRequest, TResponse>
where TResponse : IPresentationResponse
{
RemoveSubscriber((object)instance);
}
public void RemoveSubscriber(object instance)
{
var subscriber = _subscribers.SingleOrDefault(s => s.Target == instance);
if (subscriber != null)
{
_subscribers.Remove(subscriber);
}
}
public async Task<IEnumerable<TResponse>> PublishRequestAsync<TRequest, TResponse>(TRequest request)
where TRequest : IPresentationRequest<TRequest, TResponse>
where TResponse : IPresentationResponse
{
var results = new List<TResponse>();
foreach (var subscriber in _subscribers.Where(s => s.Target != null))
{
var syncHandler = subscriber.Target as IHandlePresentationRequest<TRequest, TResponse>;
if (syncHandler != null)
{
results.Add(syncHandler.Handle(request));
}
var asyncHandler = subscriber.Target as IHandlePresentationRequestAsync<TRequest, TResponse>;
if (asyncHandler != null)
{
results.Add(await asyncHandler.HandleAsync(request).ConfigureAwait(continueOnCapturedContext: false));
}
}
return results;
}
}
internal class CommandSubscribers
{
private readonly List<WeakReference> _subscribers;
public CommandSubscribers()
{
_subscribers = new List<WeakReference>();
}
public void AddSubscriber<T>(IHandlePresentationCommand<T> instance) where T : IPresentationCommand
{
AddSubscriber((object)instance);
}
public void AddSubscriber<T>(IHandlePresentationCommandAsync<T> instance) where T : IPresentationCommand
{
AddSubscriber((object)instance);
}
public void AddSubscriber(object instance)
{
if (_subscribers.Any(s => s.Target == instance))
return;
_subscribers.Add(new WeakReference(instance));
}
public void RemoveSubscriber<T>(IHandlePresentationCommand<T> instance) where T : IPresentationCommand
{
RemoveSubscriber((object)instance);
}
public void RemoveSubscriber(object instance)
{
var subscriber = _subscribers.SingleOrDefault(s => s.Target == instance);
if (subscriber != null)
{
_subscribers.Remove(subscriber);
}
}
public async Task<bool> SendAsync<TEvent>(TEvent presentationEvent) where TEvent : IPresentationCommand
{
var anySubscribersStillListening = false;
foreach (var subscriber in _subscribers.Where(s => s.Target != null))
{
var syncHandler = subscriber.Target as IHandlePresentationCommand<TEvent>;
if (syncHandler != null)
syncHandler.Handle(presentationEvent);
var asyncHandler = subscriber.Target as IHandlePresentationCommandAsync<TEvent>;
if (asyncHandler != null)
await asyncHandler.HandleAsync(presentationEvent).ConfigureAwait(continueOnCapturedContext: false);
anySubscribersStillListening = true;
}
return anySubscribersStillListening;
}
}
}
}
| |
//
// This file was generated by the BinaryNotes compiler.
// See http://bnotes.sourceforge.net
// Any modifications to this file will be lost upon recompilation of the source ASN.1.
//
using System.Collections.Generic;
using GSF.ASN1;
using GSF.ASN1.Attributes;
using GSF.ASN1.Coders;
using GSF.ASN1.Types;
namespace GSF.MMS.Model
{
[ASN1PreparedElement]
[ASN1Sequence(Name = "Event_Condition_List_instance", IsSet = false)]
public class Event_Condition_List_instance : IASN1PreparedElement
{
private static readonly IASN1PreparedElementData preparedData = CoderFactory.getInstance().newPreparedElementData(typeof(Event_Condition_List_instance));
private DefinitionChoiceType definition_;
private ObjectName name_;
[ASN1Element(Name = "name", IsOptional = false, HasTag = true, Tag = 0, HasDefaultValue = false)]
public ObjectName Name
{
get
{
return name_;
}
set
{
name_ = value;
}
}
[ASN1Element(Name = "definition", IsOptional = false, HasTag = false, HasDefaultValue = false)]
public DefinitionChoiceType Definition
{
get
{
return definition_;
}
set
{
definition_ = value;
}
}
public void initWithDefaults()
{
}
public IASN1PreparedElementData PreparedData
{
get
{
return preparedData;
}
}
[ASN1PreparedElement]
[ASN1Choice(Name = "definition")]
public class DefinitionChoiceType : IASN1PreparedElement
{
private static IASN1PreparedElementData preparedData = CoderFactory.getInstance().newPreparedElementData(typeof(DefinitionChoiceType));
private DetailsSequenceType details_;
private bool details_selected;
private ObjectIdentifier reference_;
private bool reference_selected;
[ASN1ObjectIdentifier(Name = "")]
[ASN1Element(Name = "reference", IsOptional = false, HasTag = true, Tag = 1, HasDefaultValue = false)]
public ObjectIdentifier Reference
{
get
{
return reference_;
}
set
{
selectReference(value);
}
}
[ASN1Element(Name = "details", IsOptional = false, HasTag = true, Tag = 2, HasDefaultValue = false)]
public DetailsSequenceType Details
{
get
{
return details_;
}
set
{
selectDetails(value);
}
}
public void initWithDefaults()
{
}
public IASN1PreparedElementData PreparedData
{
get
{
return preparedData;
}
}
public bool isReferenceSelected()
{
return reference_selected;
}
public void selectReference(ObjectIdentifier val)
{
reference_ = val;
reference_selected = true;
details_selected = false;
}
public bool isDetailsSelected()
{
return details_selected;
}
public void selectDetails(DetailsSequenceType val)
{
details_ = val;
details_selected = true;
reference_selected = false;
}
[ASN1PreparedElement]
[ASN1Sequence(Name = "details", IsSet = false)]
public class DetailsSequenceType : IASN1PreparedElement
{
private static IASN1PreparedElementData preparedData = CoderFactory.getInstance().newPreparedElementData(typeof(DetailsSequenceType));
private Access_Control_List_instance accessControl_;
private ICollection<Event_Condition_List_instance> eventConditionLists_;
private ICollection<Event_Condition_instance> eventConditions_;
private ICollection<Event_Condition_List_instance> referencingEventConditionLists_;
[ASN1Element(Name = "accessControl", IsOptional = false, HasTag = true, Tag = 3, HasDefaultValue = false)]
public Access_Control_List_instance AccessControl
{
get
{
return accessControl_;
}
set
{
accessControl_ = value;
}
}
[ASN1SequenceOf(Name = "eventConditions", IsSetOf = false)]
[ASN1Element(Name = "eventConditions", IsOptional = false, HasTag = true, Tag = 4, HasDefaultValue = false)]
public ICollection<Event_Condition_instance> EventConditions
{
get
{
return eventConditions_;
}
set
{
eventConditions_ = value;
}
}
[ASN1SequenceOf(Name = "eventConditionLists", IsSetOf = false)]
[ASN1Element(Name = "eventConditionLists", IsOptional = false, HasTag = true, Tag = 5, HasDefaultValue = false)]
public ICollection<Event_Condition_List_instance> EventConditionLists
{
get
{
return eventConditionLists_;
}
set
{
eventConditionLists_ = value;
}
}
[ASN1SequenceOf(Name = "referencingEventConditionLists", IsSetOf = false)]
[ASN1Element(Name = "referencingEventConditionLists", IsOptional = false, HasTag = true, Tag = 6, HasDefaultValue = false)]
public ICollection<Event_Condition_List_instance> ReferencingEventConditionLists
{
get
{
return referencingEventConditionLists_;
}
set
{
referencingEventConditionLists_ = value;
}
}
public void initWithDefaults()
{
}
public IASN1PreparedElementData PreparedData
{
get
{
return preparedData;
}
}
}
}
}
}
| |
using System;
using Eto.Forms;
using Eto.Drawing;
#if XAMMAC2
using AppKit;
using Foundation;
using CoreGraphics;
using ObjCRuntime;
using CoreAnimation;
#else
using MonoMac.AppKit;
using MonoMac.Foundation;
using MonoMac.CoreGraphics;
using MonoMac.ObjCRuntime;
using MonoMac.CoreAnimation;
#if Mac64
using CGSize = MonoMac.Foundation.NSSize;
using CGRect = MonoMac.Foundation.NSRect;
using CGPoint = MonoMac.Foundation.NSPoint;
using nfloat = System.Double;
using nint = System.Int64;
using nuint = System.UInt64;
#else
using CGSize = System.Drawing.SizeF;
using CGRect = System.Drawing.RectangleF;
using CGPoint = System.Drawing.PointF;
using nfloat = System.Single;
using nint = System.Int32;
using nuint = System.UInt32;
#endif
#endif
namespace Eto.Mac.Forms.Controls
{
public class DateTimePickerHandler : MacControl<NSDatePicker, DateTimePicker, DateTimePicker.ICallback>, DateTimePicker.IHandler
{
DateTime? curValue;
DateTimePickerMode mode;
public class EtoDatePicker : NSDatePicker, IMacControl
{
public override void DrawRect(CGRect dirtyRect)
{
if (Handler.curValue != null)
base.DrawRect(dirtyRect);
else
{
// paint with no elements visible
var old = DatePickerElements;
DatePickerElements = 0;
base.DrawRect(dirtyRect);
DatePickerElements = old;
}
}
public WeakReference WeakHandler { get; set; }
public DateTimePickerHandler Handler
{
get { return (DateTimePickerHandler)WeakHandler.Target; }
set { WeakHandler = new WeakReference(value); }
}
}
public DateTimePickerHandler()
{
Control = new EtoDatePicker
{
Handler = this,
TimeZone = NSTimeZone.LocalTimeZone,
Calendar = NSCalendar.CurrentCalendar,
DateValue = DateTime.Now.ToNS()
};
this.Mode = DateTimePickerMode.Date;
// apple+backspace clears the value
Control.ValidateProposedDateValue += HandleValidateProposedDateValue;
}
protected override void Initialize()
{
base.Initialize();
Widget.KeyDown += HandleKeyDown;
// when clicking, set the value if it is null
Widget.MouseDown += HandleMouseDown;
}
static void HandleKeyDown(object sender, KeyEventArgs e)
{
var handler = (DateTimePickerHandler)((Control)sender).Handler;
if (!e.Handled)
{
if (e.KeyData == (Keys.Application | Keys.Backspace))
{
handler.curValue = null;
handler.Callback.OnValueChanged(handler.Widget, EventArgs.Empty);
handler.Control.NeedsDisplay = true;
}
}
}
static void HandleMouseDown(object sender, MouseEventArgs e)
{
var handler = (DateTimePickerHandler)((Control)sender).Handler;
if (e.Buttons == MouseButtons.Primary)
{
if (handler.curValue == null)
{
handler.curValue = handler.Control.DateValue.ToEto();
handler.Callback.OnValueChanged(handler.Widget, EventArgs.Empty);
handler.Control.NeedsDisplay = true;
}
}
}
static void HandleValidateProposedDateValue(object sender, NSDatePickerValidatorEventArgs e)
{
var datePickerCell = (NSDatePickerCell)sender;
var handler = GetHandler(datePickerCell.ControlView) as DateTimePickerHandler;
var date = e.ProposedDateValue.ToEto();
if (date != handler.Control.DateValue.ToEto())
{
handler.curValue = date;
handler.Callback.OnValueChanged(handler.Widget, EventArgs.Empty);
}
}
protected override SizeF GetNaturalSize(SizeF availableSize)
{
return SizeF.Max(new Size(mode == DateTimePickerMode.DateTime ? 180 : 120, 10), base.GetNaturalSize(availableSize));
}
public DateTimePickerMode Mode
{
get { return mode; }
set
{
mode = value;
switch (mode)
{
case DateTimePickerMode.Date:
Control.DatePickerElements = NSDatePickerElementFlags.YearMonthDateDay;
break;
case DateTimePickerMode.Time:
Control.DatePickerElements = NSDatePickerElementFlags.HourMinuteSecond;
break;
case DateTimePickerMode.DateTime:
Control.DatePickerElements = NSDatePickerElementFlags.YearMonthDateDay | NSDatePickerElementFlags.HourMinuteSecond;
break;
default:
throw new NotSupportedException();
}
}
}
public DateTime MinDate
{
get { return Control.MinDate.ToEto() ?? DateTime.MinValue; }
set { Control.MinDate = value.ToNS(); }
}
public DateTime MaxDate
{
get { return Control.MaxDate.ToEto() ?? DateTime.MaxValue; }
set { Control.MaxDate = value.ToNS(); }
}
public DateTime? Value
{
get
{
return curValue;
}
set
{
if (value != curValue)
{
curValue = value;
// don't validate otherwise the new value gets overridden when null
Control.ValidateProposedDateValue -= HandleValidateProposedDateValue;
Control.DateValue = (value ?? DateTime.Now).ToNS();
Control.ValidateProposedDateValue += HandleValidateProposedDateValue;
Callback.OnValueChanged(Widget, EventArgs.Empty);
}
}
}
public Color TextColor
{
get { return Control.TextColor.ToEto(); }
set { Control.TextColor = value.ToNSUI(); }
}
protected override void SetBackgroundColor(Color? color)
{
if (color != null)
{
Control.Cell.BackgroundColor = color.Value.ToNSUI();
Control.Cell.DrawsBackground = true;
}
else
{
Control.Cell.BackgroundColor = NSColor.ControlBackground;
Control.Cell.DrawsBackground = true;
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System;
using System.Threading;
using System.Collections.Generic;
using System.Runtime.InteropServices;
// disable warning about unused weakref
#pragma warning disable 414
internal interface PinnedObject
{
void CleanUp();
bool IsPinned();
}
namespace GCSimulator
{
public enum LifeTimeENUM
{
Short,
Medium,
Long
}
public interface LifeTime
{
LifeTimeENUM LifeTime
{
get;
set;
}
}
public interface LifeTimeStrategy
{
int NextObject(LifeTimeENUM lifeTime);
bool ShouldDie(LifeTime o, int index);
}
/// <summary>
/// This interface abstracts the object container, allowing us to specify different data structure
/// implementations.
/// The only restriction on the ObjectContainer is that the objects contained in it must implement
/// the LifeTime interface.
/// Right now we have a simple array container as a stock implementation for that. For more information
/// see code:#ArrayContainer
/// </summary>
/// <param name="o"></param>
/// <param name="index"></param>
public interface ObjectContainer<T> where T : LifeTime
{
void Init(int numberOfObjects);
void AddObjectAt(T o, int index);
T GetObject(int index);
T SetObjectAt(T o, int index);
int Count
{
get;
}
}
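// Illustrative usage sketch (not part of the original test); any ObjectContainer<T>
// implementation is driven the same way by ObjectLifeTimeManager below. 'someObject'
// is a placeholder for any instance implementing LifeTime:
//
//   ObjectContainer<LifeTime> container = new ArrayObjectContainer<LifeTime>();
//   container.Init(1000);                                  // pre-size the container
//   container.AddObjectAt(someObject, 42);                 // store at a slot
//   LifeTime previous = container.SetObjectAt(null, 42);   // replace and get the old value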
public sealed class BinaryTreeObjectContainer<T> : ObjectContainer<T> where T : LifeTime
{
private class Node
{
public Node LeftChild;
public Node RightChild;
public int id;
public T Data;
}
private Node _root;
private int _count;
public BinaryTreeObjectContainer()
{
_root = null;
_count = 0;
}
public void Init(int numberOfObjects)
{
if (numberOfObjects <= 0)
{
return;
}
_root = new Node();
_root.id = 0;
// the total number of nodes in a full binary tree is 2^(n+1) - 1,
// where n is the depth of the tree (e.g. depth 3 gives 15 nodes)
int depth = (int)Math.Log(numberOfObjects, 2);
_count = numberOfObjects;
_root.LeftChild = CreateTree(depth, 1);
_root.RightChild = CreateTree(depth, 2);
}
public void AddObjectAt(T o, int index)
{
Node node = Find(index, _root);
if (node != null)
{
node.Data = o;
}
}
public T GetObject(int index)
{
Node node = Find(index, _root);
if (node == null)
{
return default(T);
}
return node.Data;
}
public T SetObjectAt(T o, int index)
{
Node node = Find(index, _root);
if (node == null)
{
return default(T);
}
T old = node.Data;
node.Data = o;
return old;
}
public int Count
{
get
{
return _count;
}
}
private Node CreateTree(int depth, int id)
{
if (depth <= 0 || id >= Count)
{
return null;
}
Node node = new Node();
node.id = id;
node.LeftChild = CreateTree(depth - 1, id * 2 + 1);
node.RightChild = CreateTree(depth - 1, id * 2 + 2);
return node;
}
private Node Find(int id, Node node)
{
// We want to implement Find while avoiding the creation of temporary objects.
// Our tree is fixed size; we don't allow modifying the actual
// tree by adding or deleting nodes (that would be more
// interesting, but would give us inconsistent perf numbers).
// Traverse the tree (slow, but avoids allocation); we could write
// another tree that is a BST, or use SortedList<T,T>, which uses
// a BST as the implementation.
if (node == null)
return null;
if (id == node.id)
return node;
Node retNode = null;
// find in the left child
retNode = Find(id, node.LeftChild);
// if not found, try the right child.
if (retNode == null)
retNode = Find(id, node.RightChild);
return retNode;
}
}
//#ArrayContainer Simple Array Stock Implementation for ObjectContainer
public sealed class ArrayObjectContainer<T> : ObjectContainer<T> where T : LifeTime
{
private T[] _objContainer = null;
public void Init(int numberOfObjects)
{
_objContainer = new T[numberOfObjects];
}
public void AddObjectAt(T o, int index)
{
_objContainer[index] = o;
}
public T GetObject(int index)
{
return _objContainer[index];
}
public T SetObjectAt(T o, int index)
{
T old = _objContainer[index];
_objContainer[index] = o;
return old;
}
public int Count
{
get
{
return _objContainer.Length;
}
}
}
public delegate void ObjectDiedEventHandler(LifeTime o, int index);
public sealed class ObjectLifeTimeManager
{
private LifeTimeStrategy _strategy;
private ObjectContainer<LifeTime> _objectContainer = null;
//
public void SetObjectContainer(ObjectContainer<LifeTime> objectContainer)
{
_objectContainer = objectContainer;
}
public event ObjectDiedEventHandler objectDied;
public void Init(int numberObjects)
{
_objectContainer.Init(numberObjects);
//objContainer = new object[numberObjects];
}
public LifeTimeStrategy LifeTimeStrategy
{
set
{
_strategy = value;
}
}
public void AddObject(LifeTime o, int index)
{
_objectContainer.AddObjectAt(o, index);
//objContainer[index] = o;
}
public void Run()
{
LifeTime objLifeTime;
for (int i = 0; i < _objectContainer.Count; ++i)
{
objLifeTime = _objectContainer.GetObject(i);
//object o = objContainer[i];
//objLifeTime = o as LifeTime;
if (_strategy.ShouldDie(objLifeTime, i))
{
int index = _strategy.NextObject(objLifeTime.LifeTime);
LifeTime oldObject = _objectContainer.SetObjectAt(null, index);
//objContainer[index] = null;
// fire the event
objectDied(oldObject, index);
}
}
}
}
internal class RandomLifeTimeStrategy : LifeTimeStrategy
{
private int _counter = 0;
private int _mediumLifeTime = 30;
private int _shortLifeTime = 3;
private int _mediumDataCount = 1000000;
private int _shortDataCount = 5000;
private Random _rand = new Random(123456);
public RandomLifeTimeStrategy(int mediumlt, int shortlt, int mdc, int sdc)
{
_mediumLifeTime = mediumlt;
_shortLifeTime = shortlt;
_mediumDataCount = mdc;
_shortDataCount = sdc;
}
public int MediumLifeTime
{
set
{
_mediumLifeTime = value;
}
}
public int ShortLifeTime
{
set
{
_shortLifeTime = value;
}
}
public int NextObject(LifeTimeENUM lifeTime)
{
switch (lifeTime)
{
case LifeTimeENUM.Short:
return _rand.Next() % _shortDataCount;
case LifeTimeENUM.Medium:
return (_rand.Next() % _mediumDataCount) + _shortDataCount;
case LifeTimeENUM.Long:
return 0;
}
return 0;
}
public bool ShouldDie(LifeTime o, int index)
{
_counter++;
LifeTimeENUM lifeTime = o.LifeTime;
switch (lifeTime)
{
case LifeTimeENUM.Short:
if (_counter % _shortLifeTime == 0)
return true;
break;
case LifeTimeENUM.Medium:
if (_counter % _mediumLifeTime == 0)
return true;
break;
case LifeTimeENUM.Long:
return false;
}
return false;
}
}
    /// <summary>
    /// We might want to implement a different strategy that decides the lifetime of the object based on the
    /// time elapsed since the last object access.
    /// </summary>
internal class TimeBasedLifeTimeStrategy : LifeTimeStrategy
{
private int _lastMediumTickCount = Environment.TickCount;
private int _lastShortTickCount = Environment.TickCount;
private int _lastMediumIndex = 0;
private int _lastShortIndex = 0;
public int NextObject(LifeTimeENUM lifeTime)
{
switch (lifeTime)
{
case LifeTimeENUM.Short:
return _lastShortIndex;
case LifeTimeENUM.Medium:
return _lastMediumIndex;
case LifeTimeENUM.Long:
return 0;
}
return 0;
}
public bool ShouldDie(LifeTime o, int index)
{
LifeTimeENUM lifeTime = o.LifeTime;
            // short objects live for roughly 1 tick (ms) and medium objects for roughly 20 ticks; long objects never die.
switch (lifeTime)
{
case LifeTimeENUM.Short:
                    if (Environment.TickCount - _lastShortTickCount > 1) // this is an inaccurate number, since
                    // we will finish iterating through the short lifetime objects in less than 1 ms, so we need
                    // to switch either to QueryPerformanceCounter or to block the loop for some time through
                    // Thread.Sleep; the other solution is to increase the number of objects a lot.
{
_lastShortTickCount = Environment.TickCount;
_lastShortIndex = index;
return true;
}
break;
case LifeTimeENUM.Medium:
if (Environment.TickCount - _lastMediumTickCount > 20)
{
_lastMediumTickCount = Environment.TickCount;
_lastMediumIndex = index;
return true;
}
break;
case LifeTimeENUM.Long:
break;
}
return false;
}
}
internal class ObjectWrapper : LifeTime, PinnedObject
{
private bool _pinned;
private bool _weakReferenced;
private GCHandle _gcHandle;
private LifeTimeENUM _lifeTime;
private WeakReference _weakRef;
private byte[] _data;
private int _dataSize;
public int DataSize
{
set
{
_dataSize = value;
_data = new byte[_dataSize];
if (_pinned)
{
_gcHandle = GCHandle.Alloc(_data, GCHandleType.Pinned);
}
if (_weakReferenced)
{
_weakRef = new WeakReference(_data);
_data = null;
}
}
}
public LifeTimeENUM LifeTime
{
get
{
return _lifeTime;
}
set
{
_lifeTime = value;
}
}
public bool IsPinned()
{
return _pinned;
}
public bool IsWeak()
{
return _weakReferenced;
}
public void CleanUp()
{
if (_gcHandle.IsAllocated)
{
_gcHandle.Free();
}
}
public ObjectWrapper(bool runFinalizer, bool pinned, bool weakReferenced)
{
_pinned = pinned;
_weakReferenced = weakReferenced;
if (!runFinalizer)
{
GC.SuppressFinalize(this);
}
}
~ObjectWrapper()
{
// DO SOMETHING UNCONVENTIONAL IN FINALIZER
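            // Allocating a new buffer here is deliberate: when finalizers are enabled (the -fin option),
            // every dying object performs an allocation on the finalizer thread, adding extra GC pressure.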
_data = new byte[_dataSize];
CleanUp();
}
}
internal class ClientSimulator
{
[ThreadStatic]
private static ObjectLifeTimeManager s_lifeTimeManager;
private static int s_meanAllocSize = 17;
private static int s_mediumLifeTime = 30;
private static int s_shortLifeTime = 3;
private static int s_mediumDataSize = s_meanAllocSize;
private static int s_shortDataSize = s_meanAllocSize;
private static int s_mediumDataCount = 1000000;
private static int s_shortDataCount = 5000;
private static int s_countIters = 500;
private static float s_percentPinned = 0.1F;
private static float s_percentWeak = 0.0F;
private static int s_numThreads = 1;
private static bool s_runFinalizer = false;
private static string s_strategy = "Random";
private static string s_objectGraph = "List";
private static List<Thread> s_threadList = new List<Thread>();
private static Stopwatch s_stopWatch = new Stopwatch();
private static Object s_objLock = new Object();
private static uint s_currentIterations = 0;
//keep track of the collection count for generations 0, 1, 2
private static int[] s_currentCollections = new int[3];
private static int s_outputFrequency = 0; //after how many iterations the data is printed
private static System.TimeSpan s_totalTime;
public static int Main(string[] args)
{
s_stopWatch.Start();
for (int i = 0; i < 3; i++)
{
s_currentCollections[i] = 0;
}
if (!ParseArgs(args))
return 101;
// Run the test.
for (int i = 0; i < s_numThreads; ++i)
{
Thread thread = new Thread(RunTest);
s_threadList.Add(thread);
thread.Start();
}
foreach (Thread t in s_threadList)
{
t.Join();
}
return 100;
}
public static void RunTest()
{
// Allocate the objects.
s_lifeTimeManager = new ObjectLifeTimeManager();
LifeTimeStrategy ltStrategy;
int threadMediumLifeTime = s_mediumLifeTime;
int threadShortLifeTime = s_shortLifeTime;
int threadMediumDataSize = s_mediumDataSize;
int threadShortDataSize = s_shortDataSize;
int threadMediumDataCount = s_mediumDataCount;
int threadShortDataCount = s_shortDataCount;
float threadPercentPinned = s_percentPinned;
float threadPercentWeak = s_percentWeak;
bool threadRunFinalizer = s_runFinalizer;
string threadStrategy = s_strategy;
string threadObjectGraph = s_objectGraph;
if (threadObjectGraph.ToLower() == "tree")
{
s_lifeTimeManager.SetObjectContainer(new BinaryTreeObjectContainer<LifeTime>());
}
else
{
s_lifeTimeManager.SetObjectContainer(new ArrayObjectContainer<LifeTime>());
}
s_lifeTimeManager.Init(threadShortDataCount + threadMediumDataCount);
if (threadStrategy.ToLower() == "random")
{
ltStrategy = new RandomLifeTimeStrategy(threadMediumLifeTime, threadShortLifeTime, threadMediumDataCount, threadShortDataCount);
}
else
{
                // maybe we need to specify the elapsed time.
ltStrategy = new TimeBasedLifeTimeStrategy();
}
s_lifeTimeManager.LifeTimeStrategy = ltStrategy;
s_lifeTimeManager.objectDied += new ObjectDiedEventHandler(objectDied);
for (int i = 0; i < threadShortDataCount + threadMediumDataCount; ++i)
{
bool pinned = false;
if (threadPercentPinned != 0)
{
pinned = (i % ((int)(1 / threadPercentPinned)) == 0);
}
bool weak = false;
if (threadPercentWeak != 0)
{
weak = (i % ((int)(1 / threadPercentWeak)) == 0);
}
ObjectWrapper oWrapper = new ObjectWrapper(threadRunFinalizer, pinned, weak);
if (i < threadShortDataCount)
{
oWrapper.DataSize = threadShortDataSize;
oWrapper.LifeTime = LifeTimeENUM.Short;
}
else
{
oWrapper.DataSize = threadMediumDataSize;
oWrapper.LifeTime = LifeTimeENUM.Medium;
}
s_lifeTimeManager.AddObject(oWrapper, i);
}
lock (s_objLock)
{
Console.WriteLine("Thread {0} Running With Configuration: ", System.Threading.Thread.CurrentThread.ManagedThreadId);
Console.WriteLine("==============================");
Console.WriteLine("[Thread] Medium Lifetime " + threadMediumLifeTime);
Console.WriteLine("[Thread] Short Lifetime " + threadShortLifeTime);
Console.WriteLine("[Thread] Medium Data Size " + threadMediumDataSize);
Console.WriteLine("[Thread] Short Data Size " + threadShortDataSize);
Console.WriteLine("[Thread] Medium Data Count " + threadMediumDataCount);
Console.WriteLine("[Thread] Short Data Count " + threadShortDataCount);
Console.WriteLine("[Thread] % Pinned " + threadPercentPinned);
Console.WriteLine("[Thread] % Weak " + threadPercentWeak);
Console.WriteLine("[Thread] RunFinalizers " + threadRunFinalizer);
Console.WriteLine("[Thread] Strategy " + threadStrategy);
Console.WriteLine("[Thread] Object Graph " + threadObjectGraph);
Console.WriteLine("==============================");
}
for (int i = 0; i < s_countIters; ++i)
{
// Run the test.
s_lifeTimeManager.Run();
if (s_outputFrequency > 0)
{
lock (s_objLock)
{
s_currentIterations++;
if (s_currentIterations % s_outputFrequency == 0)
{
Console.WriteLine("Iterations = {0}", s_currentIterations);
Console.WriteLine("AllocatedMemory = {0} bytes", GC.GetTotalMemory(false));
//get the number of collections and the elapsed time for this group of iterations
int[] collectionCount = new int[3];
for (int j = 0; j < 3; j++)
{
collectionCount[j] = GC.CollectionCount(j);
}
int[] newCollections = new int[3];
for (int j = 0; j < 3; j++)
{
newCollections[j] = collectionCount[j] - s_currentCollections[j];
}
//update the running count of collections
for (int j = 0; j < 3; j++)
{
s_currentCollections[j] = collectionCount[j];
}
Console.WriteLine("Gen 0 Collections = {0}", newCollections[0]);
Console.WriteLine("Gen 1 Collections = {0}", newCollections[1]);
Console.WriteLine("Gen 2 Collections = {0}", newCollections[2]);
s_stopWatch.Stop();
Console.Write("Elapsed time: ");
System.TimeSpan tSpan = s_stopWatch.Elapsed;
if (tSpan.Days > 0)
Console.Write("{0} days, ", tSpan.Days);
if (tSpan.Hours > 0)
Console.Write("{0} hours, ", tSpan.Hours);
if (tSpan.Minutes > 0)
Console.Write("{0} minutes, ", tSpan.Minutes);
Console.Write("{0} seconds, ", tSpan.Seconds);
Console.Write("{0} milliseconds", tSpan.Milliseconds);
s_totalTime += tSpan;
s_stopWatch.Reset();
s_stopWatch.Start();
Console.Write(" (Total time: ");
if (s_totalTime.Days > 0)
Console.Write("{0} days, ", s_totalTime.Days);
if (s_totalTime.Hours > 0)
Console.Write("{0} hours, ", s_totalTime.Hours);
if (s_totalTime.Minutes > 0)
Console.Write("{0} minutes, ", s_totalTime.Minutes);
Console.Write("{0} seconds, ", s_totalTime.Seconds);
Console.WriteLine("{0} milliseconds)", s_totalTime.Milliseconds);
Console.WriteLine("----------------------------------");
}
}
}
}
}
private static void objectDied(LifeTime lifeTime, int index)
{
            // put a fresh new object in its place
ObjectWrapper oWrapper = lifeTime as ObjectWrapper;
oWrapper.CleanUp();
oWrapper = new ObjectWrapper(s_runFinalizer, oWrapper.IsPinned(), oWrapper.IsWeak());
oWrapper.LifeTime = lifeTime.LifeTime;
oWrapper.DataSize = lifeTime.LifeTime == LifeTimeENUM.Short ? s_shortDataSize : s_mediumDataSize;
s_lifeTimeManager.AddObject(oWrapper, index);
}
        /// <summary>
        /// Parse the arguments; no error checking is done yet.
        /// TODO: Add more error checking.
        ///
        /// Populate variables with defaults, then overwrite them with config settings. Finally, overwrite them with the command-line parameters.
        /// </summary>
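        /// <example>
        /// Example invocation (illustrative): GCSimulator -i 1000 -t 4 -dp 0.1 -dw 0.05 -strategy Random -og Tree -out 100
        /// </example>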
public static bool ParseArgs(string[] args)
{
s_countIters = 500;
try
{
for (int i = 0; i < args.Length; ++i)
{
string currentArg = args[i];
string currentArgValue;
if (currentArg.StartsWith("-") || currentArg.StartsWith("/"))
{
currentArg = currentArg.Substring(1);
}
else
{
Console.WriteLine("Error! Unexpected argument {0}", currentArg);
return false;
}
if (currentArg.StartsWith("?"))
{
Usage();
System.Environment.FailFast("displayed help");
}
else if (currentArg.StartsWith("iter") || currentArg.Equals("i")) // number of iterations
{
currentArgValue = args[++i];
s_countIters = Int32.Parse(currentArgValue);
}
else if (currentArg.StartsWith("datasize") || currentArg.Equals("dz"))
{
currentArgValue = args[++i];
s_mediumDataSize = Int32.Parse(currentArgValue);
}
else if (currentArg.StartsWith("sdatasize") || currentArg.Equals("sdz"))
{
currentArgValue = args[++i];
s_shortDataSize = Int32.Parse(currentArgValue);
}
else if (currentArg.StartsWith("datacount") || currentArg.Equals("dc"))
{
currentArgValue = args[++i];
s_mediumDataCount = Int32.Parse(currentArgValue);
}
else if (currentArg.StartsWith("sdatacount") || currentArg.Equals("sdc"))
{
currentArgValue = args[++i];
s_shortDataCount = Int32.Parse(currentArgValue);
}
else if (currentArg.StartsWith("lifetime") || currentArg.Equals("lt"))
{
currentArgValue = args[++i];
s_shortLifeTime = Int32.Parse(currentArgValue);
s_mediumLifeTime = s_shortLifeTime * 10;
}
else if (currentArg.StartsWith("threads") || currentArg.Equals("t"))
{
currentArgValue = args[++i];
s_numThreads = Int32.Parse(currentArgValue);
}
else if (currentArg.StartsWith("fin") || currentArg.Equals("f"))
{
s_runFinalizer = true;
}
else if (currentArg.StartsWith("datapinned") || currentArg.StartsWith("dp")) // percentage data pinned
{
currentArgValue = args[++i];
s_percentPinned = float.Parse(currentArgValue, System.Globalization.CultureInfo.InvariantCulture);
if (s_percentPinned < 0 || s_percentPinned > 1)
{
Console.WriteLine("Error! datapinned should be a number from 0 to 1");
return false;
}
}
else if (currentArg.StartsWith("strategy")) //strategy that if the object died or not
{
currentArgValue = args[++i];
if ((currentArgValue.ToLower() == "random") || (currentArgValue.ToLower() == "time"))
s_strategy = currentArgValue;
else
{
Console.WriteLine("Error! Unexpected argument for strategy: {0}", currentArgValue);
return false;
}
}
else if (currentArg.StartsWith("dataweak") || currentArg.StartsWith("dw"))
{
currentArgValue = args[++i];
s_percentWeak = float.Parse(currentArgValue, System.Globalization.CultureInfo.InvariantCulture);
if (s_percentWeak < 0 || s_percentWeak > 1)
{
Console.WriteLine("Error! dataweak should be a number from 0 to 1");
return false;
}
}
else if (currentArg.StartsWith("objectgraph") || currentArg.StartsWith("og"))
{
currentArgValue = args[++i];
if ((currentArgValue.ToLower() == "tree") || (currentArgValue.ToLower() == "list"))
s_objectGraph = currentArgValue;
else
{
Console.WriteLine("Error! Unexpected argument for objectgraph: {0}", currentArgValue);
return false;
}
}
else if (currentArg.Equals("out")) //output frequency
{
currentArgValue = args[++i];
s_outputFrequency = int.Parse(currentArgValue);
}
else
{
Console.WriteLine("Error! Unexpected argument {0}", currentArg);
return false;
}
}
}
catch (System.Exception e)
{
Console.WriteLine("Incorrect arguments");
Console.WriteLine(e.ToString());
return false;
}
return true;
}
public static void Usage()
{
Console.WriteLine("GCSimulator [-?] [options]");
Console.WriteLine("\nOptions");
Console.WriteLine("\nGlobal:");
Console.WriteLine("-? Display the usage and exit");
Console.WriteLine("-i [-iter] <num iterations> : specify number of iterations for the test, default is " + s_countIters);
Console.WriteLine("\nThreads:");
Console.WriteLine("-t [-threads] <number of threads> : specifiy number of threads, default is " + s_numThreads);
Console.WriteLine("\nData:");
Console.WriteLine("-dz [-datasize] <data size> : specify the data size in bytes, default is " + s_mediumDataSize);
Console.WriteLine("-sdz [sdatasize] <data size> : specify the short lived data size in bytes, default is " + s_shortDataSize);
Console.WriteLine("-dc [datacount] <data count> : specify the medium lived data count, default is " + s_mediumDataCount);
Console.WriteLine("-sdc [sdatacount] <data count> : specify the short lived data count, default is " + s_shortDataCount);
Console.WriteLine("-lt [-lifetime] <number> : specify the life time of the objects, default is " + s_shortLifeTime);
Console.WriteLine("-f [-fin] : specify whether to do allocation in finalizer or not, default is no");
Console.WriteLine("-dp [-datapinned] <percent of data pinned> : specify the percentage of data that we want to pin (number from 0 to 1), default is " + s_percentPinned);
Console.WriteLine("-dw [-dataweak] <percent of data weak referenced> : specify the percentage of data that we want to weak reference, (number from 0 to 1) default is " + s_percentWeak);
Console.WriteLine("-strategy < indicate the strategy for deciding when the objects should die, right now we support only Random and Time strategy, default is Random");
Console.WriteLine("-og [-objectgraph] <List|Tree> : specify whether to use a List- or Tree-based object graph, default is " + s_objectGraph);
Console.WriteLine("-out <iterations> : after how many iterations to output data");
}
}
}
| |
using System;
using System.Globalization;
using System.Collections.Generic;
using Sasoma.Utils;
using Sasoma.Microdata.Interfaces;
using Sasoma.Languages.Core;
using Sasoma.Microdata.Properties;
namespace Sasoma.Microdata.Types
{
/// <summary>
/// A radio station.
/// </summary>
public class RadioStation_Core : TypeCore, ILocalBusiness
{
public RadioStation_Core()
{
this._TypeId = 221;
this._Id = "RadioStation";
this._Schema_Org_Url = "http://schema.org/RadioStation";
string label = "";
GetLabel(out label, "RadioStation", typeof(RadioStation_Core));
this._Label = label;
this._Ancestors = new int[]{266,193,155};
this._SubTypes = new int[0];
this._SuperTypes = new int[]{155};
this._Properties = new int[]{67,108,143,229,5,10,49,85,91,98,115,135,159,199,196,47,75,77,94,95,130,137,36,60,152,156,167};
}
/// <summary>
/// Physical address of the item.
/// </summary>
private Address_Core address;
public Address_Core Address
{
get
{
return address;
}
set
{
address = value;
SetPropertyInstance(address);
}
}
/// <summary>
/// The overall rating, based on a collection of reviews or ratings, of the item.
/// </summary>
private Properties.AggregateRating_Core aggregateRating;
public Properties.AggregateRating_Core AggregateRating
{
get
{
return aggregateRating;
}
set
{
aggregateRating = value;
SetPropertyInstance(aggregateRating);
}
}
/// <summary>
/// The larger organization that this local business is a branch of, if any.
/// </summary>
private BranchOf_Core branchOf;
public BranchOf_Core BranchOf
{
get
{
return branchOf;
}
set
{
branchOf = value;
SetPropertyInstance(branchOf);
}
}
/// <summary>
/// A contact point for a person or organization.
/// </summary>
private ContactPoints_Core contactPoints;
public ContactPoints_Core ContactPoints
{
get
{
return contactPoints;
}
set
{
contactPoints = value;
SetPropertyInstance(contactPoints);
}
}
/// <summary>
/// The basic containment relation between places.
/// </summary>
private ContainedIn_Core containedIn;
public ContainedIn_Core ContainedIn
{
get
{
return containedIn;
}
set
{
containedIn = value;
SetPropertyInstance(containedIn);
}
}
/// <summary>
/// The currency accepted (in <a href=\http://en.wikipedia.org/wiki/ISO_4217\ target=\new\>ISO 4217 currency format</a>).
/// </summary>
private CurrenciesAccepted_Core currenciesAccepted;
public CurrenciesAccepted_Core CurrenciesAccepted
{
get
{
return currenciesAccepted;
}
set
{
currenciesAccepted = value;
SetPropertyInstance(currenciesAccepted);
}
}
/// <summary>
/// A short description of the item.
/// </summary>
private Description_Core description;
public Description_Core Description
{
get
{
return description;
}
set
{
description = value;
SetPropertyInstance(description);
}
}
/// <summary>
/// Email address.
/// </summary>
private Email_Core email;
public Email_Core Email
{
get
{
return email;
}
set
{
email = value;
SetPropertyInstance(email);
}
}
/// <summary>
/// People working for this organization.
/// </summary>
private Employees_Core employees;
public Employees_Core Employees
{
get
{
return employees;
}
set
{
employees = value;
SetPropertyInstance(employees);
}
}
/// <summary>
/// Upcoming or past events associated with this place or organization.
/// </summary>
private Events_Core events;
public Events_Core Events
{
get
{
return events;
}
set
{
events = value;
SetPropertyInstance(events);
}
}
/// <summary>
/// The fax number.
/// </summary>
private FaxNumber_Core faxNumber;
public FaxNumber_Core FaxNumber
{
get
{
return faxNumber;
}
set
{
faxNumber = value;
SetPropertyInstance(faxNumber);
}
}
/// <summary>
/// A person who founded this organization.
/// </summary>
private Founders_Core founders;
public Founders_Core Founders
{
get
{
return founders;
}
set
{
founders = value;
SetPropertyInstance(founders);
}
}
/// <summary>
/// The date that this organization was founded.
/// </summary>
private FoundingDate_Core foundingDate;
public FoundingDate_Core FoundingDate
{
get
{
return foundingDate;
}
set
{
foundingDate = value;
SetPropertyInstance(foundingDate);
}
}
/// <summary>
/// The geo coordinates of the place.
/// </summary>
private Geo_Core geo;
public Geo_Core Geo
{
get
{
return geo;
}
set
{
geo = value;
SetPropertyInstance(geo);
}
}
/// <summary>
/// URL of an image of the item.
/// </summary>
private Image_Core image;
public Image_Core Image
{
get
{
return image;
}
set
{
image = value;
SetPropertyInstance(image);
}
}
/// <summary>
/// A count of a specific user interactions with this item\u2014for example, <code>20 UserLikes</code>, <code>5 UserComments</code>, or <code>300 UserDownloads</code>. The user interaction type should be one of the sub types of <a href=\http://schema.org/UserInteraction\>UserInteraction</a>.
/// </summary>
private InteractionCount_Core interactionCount;
public InteractionCount_Core InteractionCount
{
get
{
return interactionCount;
}
set
{
interactionCount = value;
SetPropertyInstance(interactionCount);
}
}
/// <summary>
/// The location of the event or organization.
/// </summary>
private Location_Core location;
public Location_Core Location
{
get
{
return location;
}
set
{
location = value;
SetPropertyInstance(location);
}
}
/// <summary>
/// A URL to a map of the place.
/// </summary>
private Maps_Core maps;
public Maps_Core Maps
{
get
{
return maps;
}
set
{
maps = value;
SetPropertyInstance(maps);
}
}
/// <summary>
/// A member of this organization.
/// </summary>
private Members_Core members;
public Members_Core Members
{
get
{
return members;
}
set
{
members = value;
SetPropertyInstance(members);
}
}
/// <summary>
/// The name of the item.
/// </summary>
private Name_Core name;
public Name_Core Name
{
get
{
return name;
}
set
{
name = value;
SetPropertyInstance(name);
}
}
/// <summary>
/// The opening hours for a business. Opening hours can be specified as a weekly time range, starting with days, then times per day. Multiple days can be listed with commas ',' separating each day. Day or time ranges are specified using a hyphen '-'.<br/>- Days are specified using the following two-letter combinations: <code>Mo</code>, <code>Tu</code>, <code>We</code>, <code>Th</code>, <code>Fr</code>, <code>Sa</code>, <code>Su</code>.<br/>- Times are specified using 24:00 time. For example, 3pm is specified as <code>15:00</code>. <br/>- Here is an example: <code><time itemprop=\openingHours\ datetime=\Tu,Th 16:00-20:00\>Tuesdays and Thursdays 4-8pm</time></code>. <br/>- If a business is open 7 days a week, then it can be specified as <code><time itemprop=\openingHours\ datetime=\Mo-Su\>Monday through Sunday, all day</time></code>.
/// </summary>
private OpeningHours_Core openingHours;
public OpeningHours_Core OpeningHours
{
get
{
return openingHours;
}
set
{
openingHours = value;
SetPropertyInstance(openingHours);
}
}
/// <summary>
/// Cash, credit card, etc.
/// </summary>
private PaymentAccepted_Core paymentAccepted;
public PaymentAccepted_Core PaymentAccepted
{
get
{
return paymentAccepted;
}
set
{
paymentAccepted = value;
SetPropertyInstance(paymentAccepted);
}
}
/// <summary>
/// Photographs of this place.
/// </summary>
private Photos_Core photos;
public Photos_Core Photos
{
get
{
return photos;
}
set
{
photos = value;
SetPropertyInstance(photos);
}
}
/// <summary>
/// The price range of the business, for example <code>$$$</code>.
/// </summary>
private PriceRange_Core priceRange;
public PriceRange_Core PriceRange
{
get
{
return priceRange;
}
set
{
priceRange = value;
SetPropertyInstance(priceRange);
}
}
/// <summary>
/// Review of the item.
/// </summary>
private Reviews_Core reviews;
public Reviews_Core Reviews
{
get
{
return reviews;
}
set
{
reviews = value;
SetPropertyInstance(reviews);
}
}
/// <summary>
/// The telephone number.
/// </summary>
private Telephone_Core telephone;
public Telephone_Core Telephone
{
get
{
return telephone;
}
set
{
telephone = value;
SetPropertyInstance(telephone);
}
}
/// <summary>
/// URL of the item.
/// </summary>
private Properties.URL_Core uRL;
public Properties.URL_Core URL
{
get
{
return uRL;
}
set
{
uRL = value;
SetPropertyInstance(uRL);
}
}
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Gaming.V1.Snippets
{
using Google.Api.Gax;
using Google.Api.Gax.ResourceNames;
using Google.LongRunning;
using Google.Protobuf.WellKnownTypes;
using System;
using System.Linq;
using System.Threading.Tasks;
/// <summary>Generated snippets.</summary>
public sealed class AllGeneratedRealmsServiceClientSnippets
{
/// <summary>Snippet for ListRealms</summary>
public void ListRealmsRequestObject()
{
// Snippet: ListRealms(ListRealmsRequest, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
ListRealmsRequest request = new ListRealmsRequest
{
ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
Filter = "",
OrderBy = "",
};
// Make the request
PagedEnumerable<ListRealmsResponse, Realm> response = realmsServiceClient.ListRealms(request);
// Iterate over all response items, lazily performing RPCs as required
foreach (Realm item in response)
{
// Do something with each item
Console.WriteLine(item);
}
// Or iterate over pages (of server-defined size), performing one RPC per page
foreach (ListRealmsResponse page in response.AsRawResponses())
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (Realm item in page)
{
// Do something with each item
Console.WriteLine(item);
}
}
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<Realm> singlePage = response.ReadPage(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (Realm item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}
/// <summary>Snippet for ListRealmsAsync</summary>
public async Task ListRealmsRequestObjectAsync()
{
// Snippet: ListRealmsAsync(ListRealmsRequest, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
ListRealmsRequest request = new ListRealmsRequest
{
ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
Filter = "",
OrderBy = "",
};
// Make the request
PagedAsyncEnumerable<ListRealmsResponse, Realm> response = realmsServiceClient.ListRealmsAsync(request);
// Iterate over all response items, lazily performing RPCs as required
await response.ForEachAsync((Realm item) =>
{
// Do something with each item
Console.WriteLine(item);
});
// Or iterate over pages (of server-defined size), performing one RPC per page
await response.AsRawResponses().ForEachAsync((ListRealmsResponse page) =>
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (Realm item in page)
{
// Do something with each item
Console.WriteLine(item);
}
});
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<Realm> singlePage = await response.ReadPageAsync(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (Realm item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}
/// <summary>Snippet for ListRealms</summary>
public void ListRealms()
{
// Snippet: ListRealms(string, string, int?, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
string parent = "projects/[PROJECT]/locations/[LOCATION]";
// Make the request
PagedEnumerable<ListRealmsResponse, Realm> response = realmsServiceClient.ListRealms(parent);
// Iterate over all response items, lazily performing RPCs as required
foreach (Realm item in response)
{
// Do something with each item
Console.WriteLine(item);
}
// Or iterate over pages (of server-defined size), performing one RPC per page
foreach (ListRealmsResponse page in response.AsRawResponses())
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (Realm item in page)
{
// Do something with each item
Console.WriteLine(item);
}
}
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<Realm> singlePage = response.ReadPage(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (Realm item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}
/// <summary>Snippet for ListRealmsAsync</summary>
public async Task ListRealmsAsync()
{
// Snippet: ListRealmsAsync(string, string, int?, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
string parent = "projects/[PROJECT]/locations/[LOCATION]";
// Make the request
PagedAsyncEnumerable<ListRealmsResponse, Realm> response = realmsServiceClient.ListRealmsAsync(parent);
// Iterate over all response items, lazily performing RPCs as required
await response.ForEachAsync((Realm item) =>
{
// Do something with each item
Console.WriteLine(item);
});
// Or iterate over pages (of server-defined size), performing one RPC per page
await response.AsRawResponses().ForEachAsync((ListRealmsResponse page) =>
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (Realm item in page)
{
// Do something with each item
Console.WriteLine(item);
}
});
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<Realm> singlePage = await response.ReadPageAsync(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (Realm item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}
/// <summary>Snippet for ListRealms</summary>
public void ListRealmsResourceNames()
{
// Snippet: ListRealms(LocationName, string, int?, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
// Make the request
PagedEnumerable<ListRealmsResponse, Realm> response = realmsServiceClient.ListRealms(parent);
// Iterate over all response items, lazily performing RPCs as required
foreach (Realm item in response)
{
// Do something with each item
Console.WriteLine(item);
}
// Or iterate over pages (of server-defined size), performing one RPC per page
foreach (ListRealmsResponse page in response.AsRawResponses())
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (Realm item in page)
{
// Do something with each item
Console.WriteLine(item);
}
}
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<Realm> singlePage = response.ReadPage(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (Realm item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}
/// <summary>Snippet for ListRealmsAsync</summary>
public async Task ListRealmsResourceNamesAsync()
{
// Snippet: ListRealmsAsync(LocationName, string, int?, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
// Make the request
PagedAsyncEnumerable<ListRealmsResponse, Realm> response = realmsServiceClient.ListRealmsAsync(parent);
// Iterate over all response items, lazily performing RPCs as required
await response.ForEachAsync((Realm item) =>
{
// Do something with each item
Console.WriteLine(item);
});
// Or iterate over pages (of server-defined size), performing one RPC per page
await response.AsRawResponses().ForEachAsync((ListRealmsResponse page) =>
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (Realm item in page)
{
// Do something with each item
Console.WriteLine(item);
}
});
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<Realm> singlePage = await response.ReadPageAsync(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (Realm item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}
/// <summary>Snippet for GetRealm</summary>
public void GetRealmRequestObject()
{
// Snippet: GetRealm(GetRealmRequest, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
GetRealmRequest request = new GetRealmRequest
{
RealmName = RealmName.FromProjectLocationRealm("[PROJECT]", "[LOCATION]", "[REALM]"),
};
// Make the request
Realm response = realmsServiceClient.GetRealm(request);
// End snippet
}
/// <summary>Snippet for GetRealmAsync</summary>
public async Task GetRealmRequestObjectAsync()
{
// Snippet: GetRealmAsync(GetRealmRequest, CallSettings)
// Additional: GetRealmAsync(GetRealmRequest, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
GetRealmRequest request = new GetRealmRequest
{
RealmName = RealmName.FromProjectLocationRealm("[PROJECT]", "[LOCATION]", "[REALM]"),
};
// Make the request
Realm response = await realmsServiceClient.GetRealmAsync(request);
// End snippet
}
/// <summary>Snippet for GetRealm</summary>
public void GetRealm()
{
// Snippet: GetRealm(string, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
string name = "projects/[PROJECT]/locations/[LOCATION]/realms/[REALM]";
// Make the request
Realm response = realmsServiceClient.GetRealm(name);
// End snippet
}
/// <summary>Snippet for GetRealmAsync</summary>
public async Task GetRealmAsync()
{
// Snippet: GetRealmAsync(string, CallSettings)
// Additional: GetRealmAsync(string, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
string name = "projects/[PROJECT]/locations/[LOCATION]/realms/[REALM]";
// Make the request
Realm response = await realmsServiceClient.GetRealmAsync(name);
// End snippet
}
/// <summary>Snippet for GetRealm</summary>
public void GetRealmResourceNames()
{
// Snippet: GetRealm(RealmName, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
RealmName name = RealmName.FromProjectLocationRealm("[PROJECT]", "[LOCATION]", "[REALM]");
// Make the request
Realm response = realmsServiceClient.GetRealm(name);
// End snippet
}
/// <summary>Snippet for GetRealmAsync</summary>
public async Task GetRealmResourceNamesAsync()
{
// Snippet: GetRealmAsync(RealmName, CallSettings)
// Additional: GetRealmAsync(RealmName, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
RealmName name = RealmName.FromProjectLocationRealm("[PROJECT]", "[LOCATION]", "[REALM]");
// Make the request
Realm response = await realmsServiceClient.GetRealmAsync(name);
// End snippet
}
/// <summary>Snippet for CreateRealm</summary>
public void CreateRealmRequestObject()
{
// Snippet: CreateRealm(CreateRealmRequest, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
CreateRealmRequest request = new CreateRealmRequest
{
ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
RealmId = "",
Realm = new Realm(),
};
// Make the request
Operation<Realm, OperationMetadata> response = realmsServiceClient.CreateRealm(request);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = realmsServiceClient.PollOnceCreateRealm(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for CreateRealmAsync</summary>
public async Task CreateRealmRequestObjectAsync()
{
// Snippet: CreateRealmAsync(CreateRealmRequest, CallSettings)
// Additional: CreateRealmAsync(CreateRealmRequest, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
CreateRealmRequest request = new CreateRealmRequest
{
ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
RealmId = "",
Realm = new Realm(),
};
// Make the request
Operation<Realm, OperationMetadata> response = await realmsServiceClient.CreateRealmAsync(request);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = await realmsServiceClient.PollOnceCreateRealmAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for CreateRealm</summary>
public void CreateRealm()
{
// Snippet: CreateRealm(string, Realm, string, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
string parent = "projects/[PROJECT]/locations/[LOCATION]";
Realm realm = new Realm();
string realmId = "";
// Make the request
Operation<Realm, OperationMetadata> response = realmsServiceClient.CreateRealm(parent, realm, realmId);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = realmsServiceClient.PollOnceCreateRealm(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for CreateRealmAsync</summary>
public async Task CreateRealmAsync()
{
// Snippet: CreateRealmAsync(string, Realm, string, CallSettings)
// Additional: CreateRealmAsync(string, Realm, string, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
string parent = "projects/[PROJECT]/locations/[LOCATION]";
Realm realm = new Realm();
string realmId = "";
// Make the request
Operation<Realm, OperationMetadata> response = await realmsServiceClient.CreateRealmAsync(parent, realm, realmId);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = await realmsServiceClient.PollOnceCreateRealmAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for CreateRealm</summary>
public void CreateRealmResourceNames()
{
// Snippet: CreateRealm(LocationName, Realm, string, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
Realm realm = new Realm();
string realmId = "";
// Make the request
Operation<Realm, OperationMetadata> response = realmsServiceClient.CreateRealm(parent, realm, realmId);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = realmsServiceClient.PollOnceCreateRealm(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for CreateRealmAsync</summary>
public async Task CreateRealmResourceNamesAsync()
{
// Snippet: CreateRealmAsync(LocationName, Realm, string, CallSettings)
// Additional: CreateRealmAsync(LocationName, Realm, string, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
Realm realm = new Realm();
string realmId = "";
// Make the request
Operation<Realm, OperationMetadata> response = await realmsServiceClient.CreateRealmAsync(parent, realm, realmId);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = await realmsServiceClient.PollOnceCreateRealmAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for DeleteRealm</summary>
public void DeleteRealmRequestObject()
{
// Snippet: DeleteRealm(DeleteRealmRequest, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
DeleteRealmRequest request = new DeleteRealmRequest
{
RealmName = RealmName.FromProjectLocationRealm("[PROJECT]", "[LOCATION]", "[REALM]"),
};
// Make the request
Operation<Empty, OperationMetadata> response = realmsServiceClient.DeleteRealm(request);
// Poll until the returned long-running operation is complete
Operation<Empty, OperationMetadata> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Empty result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Empty, OperationMetadata> retrievedResponse = realmsServiceClient.PollOnceDeleteRealm(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Empty retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for DeleteRealmAsync</summary>
public async Task DeleteRealmRequestObjectAsync()
{
// Snippet: DeleteRealmAsync(DeleteRealmRequest, CallSettings)
// Additional: DeleteRealmAsync(DeleteRealmRequest, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
DeleteRealmRequest request = new DeleteRealmRequest
{
RealmName = RealmName.FromProjectLocationRealm("[PROJECT]", "[LOCATION]", "[REALM]"),
};
// Make the request
Operation<Empty, OperationMetadata> response = await realmsServiceClient.DeleteRealmAsync(request);
// Poll until the returned long-running operation is complete
Operation<Empty, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Empty result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Empty, OperationMetadata> retrievedResponse = await realmsServiceClient.PollOnceDeleteRealmAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Empty retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for DeleteRealm</summary>
public void DeleteRealm()
{
// Snippet: DeleteRealm(string, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
string name = "projects/[PROJECT]/locations/[LOCATION]/realms/[REALM]";
// Make the request
Operation<Empty, OperationMetadata> response = realmsServiceClient.DeleteRealm(name);
// Poll until the returned long-running operation is complete
Operation<Empty, OperationMetadata> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Empty result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Empty, OperationMetadata> retrievedResponse = realmsServiceClient.PollOnceDeleteRealm(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Empty retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for DeleteRealmAsync</summary>
public async Task DeleteRealmAsync()
{
// Snippet: DeleteRealmAsync(string, CallSettings)
// Additional: DeleteRealmAsync(string, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
string name = "projects/[PROJECT]/locations/[LOCATION]/realms/[REALM]";
// Make the request
Operation<Empty, OperationMetadata> response = await realmsServiceClient.DeleteRealmAsync(name);
// Poll until the returned long-running operation is complete
Operation<Empty, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Empty result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Empty, OperationMetadata> retrievedResponse = await realmsServiceClient.PollOnceDeleteRealmAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Empty retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for DeleteRealm</summary>
public void DeleteRealmResourceNames()
{
// Snippet: DeleteRealm(RealmName, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
RealmName name = RealmName.FromProjectLocationRealm("[PROJECT]", "[LOCATION]", "[REALM]");
// Make the request
Operation<Empty, OperationMetadata> response = realmsServiceClient.DeleteRealm(name);
// Poll until the returned long-running operation is complete
Operation<Empty, OperationMetadata> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Empty result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Empty, OperationMetadata> retrievedResponse = realmsServiceClient.PollOnceDeleteRealm(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Empty retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for DeleteRealmAsync</summary>
public async Task DeleteRealmResourceNamesAsync()
{
// Snippet: DeleteRealmAsync(RealmName, CallSettings)
// Additional: DeleteRealmAsync(RealmName, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
RealmName name = RealmName.FromProjectLocationRealm("[PROJECT]", "[LOCATION]", "[REALM]");
// Make the request
Operation<Empty, OperationMetadata> response = await realmsServiceClient.DeleteRealmAsync(name);
// Poll until the returned long-running operation is complete
Operation<Empty, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Empty result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Empty, OperationMetadata> retrievedResponse = await realmsServiceClient.PollOnceDeleteRealmAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Empty retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for UpdateRealm</summary>
public void UpdateRealmRequestObject()
{
// Snippet: UpdateRealm(UpdateRealmRequest, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
UpdateRealmRequest request = new UpdateRealmRequest
{
Realm = new Realm(),
UpdateMask = new FieldMask(),
};
// Make the request
Operation<Realm, OperationMetadata> response = realmsServiceClient.UpdateRealm(request);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = realmsServiceClient.PollOnceUpdateRealm(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for UpdateRealmAsync</summary>
public async Task UpdateRealmRequestObjectAsync()
{
// Snippet: UpdateRealmAsync(UpdateRealmRequest, CallSettings)
// Additional: UpdateRealmAsync(UpdateRealmRequest, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
UpdateRealmRequest request = new UpdateRealmRequest
{
Realm = new Realm(),
UpdateMask = new FieldMask(),
};
// Make the request
Operation<Realm, OperationMetadata> response = await realmsServiceClient.UpdateRealmAsync(request);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = await realmsServiceClient.PollOnceUpdateRealmAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for UpdateRealm</summary>
public void UpdateRealm()
{
// Snippet: UpdateRealm(Realm, FieldMask, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
Realm realm = new Realm();
FieldMask updateMask = new FieldMask();
// Make the request
Operation<Realm, OperationMetadata> response = realmsServiceClient.UpdateRealm(realm, updateMask);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = realmsServiceClient.PollOnceUpdateRealm(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for UpdateRealmAsync</summary>
public async Task UpdateRealmAsync()
{
// Snippet: UpdateRealmAsync(Realm, FieldMask, CallSettings)
// Additional: UpdateRealmAsync(Realm, FieldMask, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
Realm realm = new Realm();
FieldMask updateMask = new FieldMask();
// Make the request
Operation<Realm, OperationMetadata> response = await realmsServiceClient.UpdateRealmAsync(realm, updateMask);
// Poll until the returned long-running operation is complete
Operation<Realm, OperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Realm result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
Operation<Realm, OperationMetadata> retrievedResponse = await realmsServiceClient.PollOnceUpdateRealmAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Realm retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for PreviewRealmUpdate</summary>
public void PreviewRealmUpdateRequestObject()
{
// Snippet: PreviewRealmUpdate(PreviewRealmUpdateRequest, CallSettings)
// Create client
RealmsServiceClient realmsServiceClient = RealmsServiceClient.Create();
// Initialize request argument(s)
PreviewRealmUpdateRequest request = new PreviewRealmUpdateRequest
{
Realm = new Realm(),
UpdateMask = new FieldMask(),
PreviewTime = new Timestamp(),
};
// Make the request
PreviewRealmUpdateResponse response = realmsServiceClient.PreviewRealmUpdate(request);
// End snippet
}
/// <summary>Snippet for PreviewRealmUpdateAsync</summary>
public async Task PreviewRealmUpdateRequestObjectAsync()
{
// Snippet: PreviewRealmUpdateAsync(PreviewRealmUpdateRequest, CallSettings)
// Additional: PreviewRealmUpdateAsync(PreviewRealmUpdateRequest, CancellationToken)
// Create client
RealmsServiceClient realmsServiceClient = await RealmsServiceClient.CreateAsync();
// Initialize request argument(s)
PreviewRealmUpdateRequest request = new PreviewRealmUpdateRequest
{
Realm = new Realm(),
UpdateMask = new FieldMask(),
PreviewTime = new Timestamp(),
};
// Make the request
PreviewRealmUpdateResponse response = await realmsServiceClient.PreviewRealmUpdateAsync(request);
// End snippet
}
}
}
| |
//WordSlide
//Copyright (C) 2008-2012 Jonathan Ray <[email protected]>
//WordSlide is free software: you can redistribute it and/or modify
//it under the terms of the GNU General Public License as published by
//the Free Software Foundation, either version 3 of the License, or
//(at your option) any later version.
//This program is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//GNU General Public License for more details.
//A copy of the GNU General Public License should be in the
//Installer directory of this source tree. If not, see
//<http://www.gnu.org/licenses/>.
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using System.IO;
namespace WordSlideEngine
{
/// <summary>
/// The EditableSlideSet class is able to load and store data about a slide set for editing in the
/// editor. For the version of this class used in the slideshow, see DisplaySlideSet.
/// </summary>
public class EditableSlideSet : SlideSet
{
/// <summary>
/// Provides access to the text of the name of the slide set.
/// </summary>
public string Name { get { return name; } set { name = value.Trim(); } }
/// <summary>
/// Provides access to the text of the byline of the slide set.
/// </summary>
public string Byline { get { return byline; } set { byline = value.Trim(); } }
/// <summary>
/// Provides access to the text of the copyright line of the slide set.
/// </summary>
public string Copyright { get { return copyright; } set { copyright = value.Trim(); } }
/// <summary>
/// Provides access to the value of the lines per slide property of the slide set.
/// </summary>
public int LinesPerSlide { get { return linesperslide; } set { linesperslide = value; } }
/// <summary>
/// Provides access to the index of the chorus text of the slide set.
/// </summary>
public int Chorus { get { return chorus; } set { chorus = value; } }
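/// <summary>
/// Provides access to the file path of the slide set.
/// </summary>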
public string Path { get { return path; } }
/// <summary>
/// Provides access to the texts of the slides of the slide set.
/// </summary>
public string[] Texts { get { return texts; } }
/// <summary>
/// Provides access to the display order of the slides, expressed as indices into the texts array.
/// </summary>
public int[] Order { get { return order; } }
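/// <summary>
/// Provides access to the per-slide lines-per-slide values of the slide set.
/// </summary>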
public int[] LocalLinesPerSlide { get { return locallinesperslide; } }
/// <summary>
/// This constructor creates a new, empty EditableSlideSet, ready to be filled with data.
/// </summary>
public EditableSlideSet()
{
path = "";
name = "";
byline = "";
copyright = "";
texts = new string[1];
texts[0] = "";
order = new int[1];
order[0] = 0;
locallinesperslide = new int[1];
locallinesperslide[0] = 0;
chorus = -1;
linesperslide = 4;
}
/// <summary>
/// This constructor, used in the open box of the editor, opens the requested slide set file
/// and reads its name, leaving the other properties uninitialized. The remaining properties
/// are intended to be filled in afterwards with the loadFile function.
/// </summary>
/// <param name="filepath">The path to the desired slide set.</param>
public EditableSlideSet(string filepath)
{
path = filepath;
getTitle();
}
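/// <summary>
/// Builds a file path inside the slide directory from the given name, keeping only
/// letters and digits and appending the .sld extension.
/// </summary>
/// <param name="name">The name of the slide set.</param>
/// <returns>The generated file path.</returns>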
public static string getNewPath(string name)
{
string filename = "";
char[] namearray = name.ToCharArray();
for (int x = 0; x < namearray.Length; x++)
{
if (Char.IsLetterOrDigit(namearray[x]))
{
filename += namearray[x];
}
}
return System.IO.Path.Combine(Engine.SlideDirectory, filename + ".sld");
}
/// <summary>
/// Rebuilds the value of the path of the slide set. To be called when the slide set is
/// renamed, and a new file needs to be generated with the new title.
/// </summary>
public void resetPath()
{
path = getNewPath(name);
}
/// <summary>
/// Initialize the arrays holding the slide texts and the order of slides.
/// </summary>
/// <param name="tcount">The number of unique slides in the set.</param>
/// <param name="ocount">The number of slides in the order of the set.</param>
public void setupTexts(int tcount, int ocount)
{
texts = new string[tcount];
order = new int[ocount];
locallinesperslide = new int[tcount];
}
/// <summary>
/// Set the text of the indicated slide to the indicated text.
/// </summary>
/// <param name="index">The index of the slide text to overwrite.</param>
/// <param name="text">The new text of the indicated slide.</param>
public void addText(int index, string text, int llps)
{
texts[index] = text.Trim();
locallinesperslide[index] = llps;
}
/// <summary>
/// Set the indicated entry of the slide order to the provided slide index.
/// </summary>
/// <param name="index">The position in the slide order to overwrite.</param>
/// <param name="slide">The index of the slide to show at that position.</param>
public void addOrder(int index, int slide)
{
order[index] = slide;
}
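/// <summary>
/// Builds the XML document representing the slide set. When delete is true, removes the
/// existing file (if it lives in the slide directory) before the path is regenerated from
/// the current name.
/// </summary>
/// <param name="delete">Whether to delete the existing slide set file first.</param>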
private void generateXmlDocument(bool delete)
{
source = new XmlDocument();
if (delete && path.StartsWith(Engine.SlideDirectory))
{
System.IO.File.Delete(path);
}
resetPath();
XmlNode setnode = source.CreateNode(XmlNodeType.Element, "set", "");
XmlNode title = source.CreateElement("title");
title.InnerText = name;
XmlNode by = source.CreateElement("by");
by.InnerText = byline;
XmlNode cr = source.CreateElement("copyright");
cr.InnerText = copyright;
XmlNode lps = source.CreateElement("lps");
lps.InnerText = linesperslide.ToString();
setnode.AppendChild(title);
setnode.AppendChild(by);
setnode.AppendChild(cr);
setnode.AppendChild(lps);
if (chorus != -1)
{
XmlNode chi = source.CreateElement("chorus");
chi.InnerText = chorus.ToString();
setnode.AppendChild(chi);
}
for (int x = 0; x < texts.Length; x++)
{
XmlNode frame = source.CreateElement("frame");
XmlNode frameid = source.CreateElement("id");
frameid.InnerText = x.ToString();
XmlNode frametext = source.CreateElement("text");
frametext.InnerText = texts[x];
frame.AppendChild(frameid);
frame.AppendChild(frametext);
setnode.AppendChild(frame);
}
XmlNode frameorder = source.CreateElement("order");
string ordertext = "";
for (int x = 0; x < order.Length; x++)
{
ordertext += order[x];
if (x < order.Length - 1)
{
ordertext += ":";
}
}
frameorder.InnerText = ordertext;
setnode.AppendChild(frameorder);
XmlNode llps = source.CreateElement("llps");
string llpstext = "";
for (int x = 0; x < locallinesperslide.Length; x++)
{
llpstext += locallinesperslide[x];
if (x < locallinesperslide.Length - 1)
{
llpstext += ":";
}
}
llps.InnerText = llpstext;
setnode.AppendChild(llps);
source.AppendChild(setnode);
}
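/// <summary>
/// Returns the slide set serialized as an XML string without writing it to disk.
/// </summary>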
public string getWriteText()
{
generateXmlDocument(false);
StringWriter sw = new StringWriter();
XmlTextWriter xtw = new XmlTextWriter(sw);
source.WriteTo(xtw);
return sw.ToString();
}
/// <summary>
/// Write the current slide set to file.
/// </summary>
public void Write()
{
generateXmlDocument(true);
source.Save(path);
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Test.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Recommendations
{
public class ClassKeywordRecommenderTests : KeywordRecommenderTests
{
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AtRoot_Interactive()
{
VerifyKeyword(SourceCodeKind.Script,
@"$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterClass_Interactive()
{
VerifyKeyword(SourceCodeKind.Script,
@"class C { }
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterGlobalStatement_Interactive()
{
VerifyKeyword(SourceCodeKind.Script,
@"System.Console.WriteLine();
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterGlobalVariableDeclaration_Interactive()
{
VerifyKeyword(SourceCodeKind.Script,
@"int i = 0;
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInUsingAlias()
{
VerifyAbsence(
@"using Foo = $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInEmptyStatement()
{
VerifyAbsence(AddInsideMethod(
@"$$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InCompilationUnit()
{
VerifyKeyword(
@"$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterExtern()
{
VerifyKeyword(
@"extern alias Foo;
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterUsing()
{
VerifyKeyword(
@"using Foo;
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterNamespace()
{
VerifyKeyword(
@"namespace N {}
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterTypeDeclaration()
{
VerifyKeyword(
@"class C {}
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterDelegateDeclaration()
{
VerifyKeyword(
@"delegate void Foo();
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterMethod()
{
VerifyKeyword(
@"class C {
void Foo() {}
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterField()
{
VerifyKeyword(
@"class C {
int i;
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterProperty()
{
VerifyKeyword(
@"class C {
int i { get; }
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotBeforeUsing()
{
VerifyAbsence(SourceCodeKind.Regular,
@"$$
using Foo;");
}
[WpfFact(Skip = "528041"), Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotBeforeUsing_Interactive()
{
VerifyAbsence(SourceCodeKind.Script,
@"$$
using Foo;");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterAssemblyAttribute()
{
VerifyKeyword(
@"[assembly: foo]
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterRootAttribute()
{
VerifyKeyword(
@"[foo]
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterNestedAttribute()
{
VerifyKeyword(
@"class C {
[foo]
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InsideStruct()
{
VerifyKeyword(
@"struct S {
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInsideInterface()
{
VerifyAbsence(@"interface I {
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InsideClass()
{
VerifyKeyword(
@"class C {
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterPartial()
{
VerifyKeyword(
@"partial $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterAbstract()
{
VerifyKeyword(
@"abstract $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterInternal()
{
VerifyKeyword(
@"internal $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterStaticPublic()
{
VerifyKeyword(
@"static public $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterPublicStatic()
{
VerifyKeyword(
@"public static $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotAfterInvalidPublic()
{
VerifyAbsence(@"virtual public $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterPublic()
{
VerifyKeyword(
@"public $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterPrivate()
{
VerifyKeyword(
@"private $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterProtected()
{
VerifyKeyword(
@"protected $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterSealed()
{
VerifyKeyword(
@"sealed $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterStatic()
{
VerifyKeyword(
@"static $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotAfterStaticInUsingDirective()
{
VerifyAbsence(
@"using static $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotAfterClass()
{
VerifyAbsence(@"class $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotBetweenUsings()
{
VerifyAbsence(AddInsideMethod(
@"using Foo;
$$
using Bar;"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterClassTypeParameterConstraint()
{
VerifyKeyword(
@"class C<T> where T : $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterClassTypeParameterConstraint2()
{
VerifyKeyword(
@"class C<T>
where T : $$
where U : U");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterMethodTypeParameterConstraint()
{
VerifyKeyword(
@"class C {
void Foo<T>()
where T : $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterMethodTypeParameterConstraint2()
{
VerifyKeyword(
@"class C {
void Foo<T>()
where T : $$
where U : T");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterNew()
{
VerifyKeyword(
@"class C {
new $$");
}
}
}
| |
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using System.Windows.Forms;
namespace BizHawk.Client.EmuHawk
{
#region win32interop
[StructLayout(LayoutKind.Sequential)]
internal struct LvDispInfo
{
public NmHdr Hdr;
public LvItem Item;
}
[StructLayout(LayoutKind.Sequential)]
internal struct NmHdr
{
public IntPtr HwndFrom;
public IntPtr IdFrom;
public int Code;
}
[StructLayout(LayoutKind.Sequential)]
internal struct NmItemActivate
{
public NmHdr Hdr;
public int Item;
public int SubItem;
public uint NewState;
public uint OldState;
public uint uChanged;
public POINT Action;
public uint lParam;
public uint KeyFlags;
}
[StructLayout(LayoutKind.Sequential)]
internal struct RECT
{
public int Top;
public int Left;
public int Bottom;
public int Right;
}
[StructLayout(LayoutKind.Sequential)]
internal struct NmCustomDrawInfo
{
public NmHdr Hdr;
public uint dwDrawStage;
public IntPtr Hdc;
public RECT Rect;
public IntPtr dwItemSpec;
public uint ItemState;
private int _pad64bits;
public IntPtr lItemlParam;
}
[StructLayout(LayoutKind.Sequential)]
internal struct NmLvCustomDraw
{
public NmCustomDrawInfo Nmcd;
public int ClearText;
public int ClearTextBackground;
public int SubItem;
}
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
internal struct LvItem
{
public uint Mask;
public int Item;
public int SubItem;
public uint State;
public uint StateMask;
public IntPtr PszText;
public int cchTextMax;
public int Image;
public IntPtr lParam;
public int Indent;
}
[FlagsAttribute]
internal enum CustomDrawReturnFlags
{
CDRF_DODEFAULT = 0x00000000,
CDRF_NEWFONT = 0x00000002,
CDRF_SKIPDEFAULT = 0x00000004,
CDRF_NOTIFYPOSTPAINT = 0x00000010,
CDRF_NOTIFYITEMDRAW = 0x00000020,
CDRF_NOTIFYSUBITEMDRAW = 0x00000020,
CDRF_NOTIFYPOSTERASE = 0x00000040,
}
[FlagsAttribute]
internal enum CustomDrawDrawStageFlags
{
CDDS_PREPAINT = 0x00000001,
CDDS_POSTPAINT = 0x00000002,
CDDS_PREERASE = 0x00000003,
CDDS_POSTERASE = 0x00000004,
// the 0x00010000 bit means the notification is individual-item specific
CDDS_ITEM = 0x00010000,
CDDS_ITEMPREPAINT = (CDDS_ITEM | CDDS_PREPAINT),
CDDS_ITEMPOSTPAINT = (CDDS_ITEM | CDDS_POSTPAINT),
CDDS_ITEMPREERASE = (CDDS_ITEM | CDDS_PREERASE),
CDDS_ITEMPOSTERASE = (CDDS_ITEM | CDDS_POSTERASE),
CDDS_SUBITEM = 0x00020000,
CDDS_SUBITEMPREPAINT = (CDDS_SUBITEM | CDDS_ITEMPREPAINT),
CDDS_SUBITEMPOSTPAINT = (CDDS_SUBITEM | CDDS_ITEMPOSTPAINT),
CDDS_SUBITEMPREERASE = (CDDS_SUBITEM | CDDS_ITEMPREERASE),
CDDS_SUBITEMPOSTERASE = (CDDS_SUBITEM | CDDS_ITEMPOSTERASE),
}
[FlagsAttribute]
internal enum LvHitTestFlags
{
LVHT_NOWHERE = 0x0001,
LVHT_ONITEMICON = 0x0002,
LVHT_ONITEMLABEL = 0x0004,
LVHT_ONITEMSTATEICON = 0x0008,
LVHT_ONITEM = (LVHT_ONITEMICON | LVHT_ONITEMLABEL | LVHT_ONITEMSTATEICON),
LVHT_ABOVE = 0x0008,
LVHT_BELOW = 0x0010,
LVHT_TORIGHT = 0x0020,
LVHT_TOLEFT = 0x0040
}
[StructLayout(LayoutKind.Sequential)]
internal struct POINT
{
public int X;
public int Y;
}
[StructLayout(LayoutKind.Sequential)]
internal class LvHitTestInfo
{
public POINT Point;
public uint Flags;
public int Item;
public int SubItem;
}
[StructLayout(LayoutKind.Sequential)]
internal struct NMLISTVIEW
{
public NmHdr hdr;
public int iItem;
public int iSubItem;
public uint uNewState;
public uint uOldState;
public uint uChanged;
public POINT ptAction;
public IntPtr lParam;
}
internal enum ListViewItemMask : short
{
LVIF_TEXT = 0x0001,
LVIF_IMAGE = 0x0002,
LVIF_PARAM = 0x0004,
LVIF_STATE = 0x0008,
LVIF_INDENT = 0x0010,
LVIF_NORECOMPUTE = 0x0800,
LVIF_GROUPID = 0x0100,
LVIF_COLUMNS = 0x0200
}
internal enum LvNi
{
ALL = 0x0000,
FOCUSED = 0x0001,
SELECTED = 0x0002,
CUT = 0x0004,
DROPHILITED = 0x0008,
ABOVE = 0x0100,
BELOW = 0x0200,
TOLEFT = 0x0400,
TORIGHT = 0x0800
}
internal enum ListViewMessages
{
LVM_FIRST = 0x1000,
LVM_GETITEMCOUNT = (LVM_FIRST + 4),
LVM_SETCALLBACKMASK = (LVM_FIRST + 11),
LVM_GETNEXTITEM = (LVM_FIRST + 12),
LVM_HITTEST = (LVM_FIRST + 18),
LVM_ENSUREVISIBLE = (LVM_FIRST + 19),
LVM_SETITEMSTATE = (LVM_FIRST + 43),
LVM_GETITEMSTATE = (LVM_FIRST + 44),
LVM_SETITEMCOUNT = (LVM_FIRST + 47),
LVM_GETSUBITEMRECT = (LVM_FIRST + 56)
}
internal enum ListViewStyles : short
{
LVS_OWNERDATA = 0x1000,
LVS_SORTASCENDING = 0x0010,
LVS_SORTDESCENDING = 0x0020,
LVS_SHAREIMAGELISTS = 0x0040,
LVS_NOLABELWRAP = 0x0080,
LVS_AUTOARRANGE = 0x0100
}
internal enum ListViewStylesICF : uint
{
LVSICF_NOINVALIDATEALL = 0x00000001,
LVSICF_NOSCROLL = 0x00000002
}
internal enum WindowsMessage : uint
{
WM_ERASEBKGND = 0x0014,
WM_SCROLL = 0x115,
WM_LBUTTONDOWN = 0x0201,
WM_LBUTTONUP = 0x0202,
WM_LBUTTONDBLCLK = 0x0203,
WM_RBUTTONDOWN = 0x0204,
WM_RBUTTONUP = 0x0205,
WM_RBUTTONDBLCLK = 0x0206,
WM_SETFOCUS = 0x0007,
WM_NOTIFY = 0x004E,
WM_USER = 0x0400,
WM_REFLECT = WM_USER + 0x1c00
}
internal enum Notices
{
NM_FIRST = 0,
NM_CUSTOMDRAW = NM_FIRST - 12,
NM_CLICK = NM_FIRST - 2,
NM_DBLCLICK = NM_FIRST - 3,
}
internal enum ListViewNotices
{
LVN_FIRST = (0 - 100),
LVN_LAST = (0 - 199),
LVN_BEGINDRAG = LVN_FIRST - 9,
LVN_BEGINRDRAG = LVN_FIRST - 11,
LVN_GETDISPINFOA = LVN_FIRST - 50,
LVN_GETDISPINFOW = LVN_FIRST - 77,
LVN_SETDISPINFOA = LVN_FIRST - 51,
LVN_SETDISPINFOW = LVN_FIRST - 78,
LVN_ODCACHEHINT = LVN_FIRST - 13,
LVN_ODFINDITEMW = LVN_FIRST - 79
}
[Flags]
internal enum ListViewCallBackMask : uint
{
LVIS_FOCUSED = 0x0001,
LVIS_SELECTED = 0x0002,
LVIS_CUT = 0x0004,
LVIS_DROPHILITED = 0x0008,
LVIS_GLOW = 0x0010,
LVIS_ACTIVATING = 0x0020,
LVIS_OVERLAYMASK = 0x0F00,
LVIS_STATEIMAGEMASK = 0xF000,
}
#endregion
#region VirtualListView Delegates
/// <summary>
/// Retrieve the background color for a Listview cell (item and subitem).
/// </summary>
/// <param name="item">Listview item (row).</param>
/// <param name="subItem">Listview subitem (column).</param>
/// <param name="color">Background color to use</param>
public delegate void QueryItemBkColorHandler(int item, int subItem, ref Color color);
/// <summary>
/// Retrieve the text for a Listview cell (item and subitem).
/// </summary>
/// <param name="item">Listview item (row).</param>
/// <param name="subItem">Listview subitem (column).</param>
/// <param name="text">Text to display.</param>
public delegate void QueryItemTextHandler(int item, int subItem, out string text);
/// <summary>
/// Retrieve the image index for a Listview item.
/// </summary>
/// <param name="item">Listview item (row).</param>
/// <param name="subItem">Listview subitem (column) - should always be zero.</param>
/// <param name="imageIndex">Index of associated ImageList.</param>
public delegate void QueryItemImageHandler(int item, int subItem, out int imageIndex);
/// <summary>
/// Retrieve the indent for a Listview item. The indent is always a multiple of the image width.
/// For example, 1 indents the Listview item by one image width.
/// </summary>
/// <param name="item">Listview item (row).</param>
/// <param name="itemIndent">The amount to indent the Listview item.</param>
public delegate void QueryItemIndentHandler(int item, out int itemIndent);
public delegate void QueryItemHandler(int idx, out ListViewItem item);
#endregion
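// A minimal wiring sketch for these callbacks (the containing Form, the _rows array and the
// column setup are illustrative assumptions, not part of this file):
//
//   var listView = new VirtualListView { Dock = DockStyle.Fill, ItemCount = _rows.Length };
//   listView.QueryItemText += delegate(int item, int subItem, out string text)
//   {
//       text = _rows[item][subItem];
//   };
//   listView.QueryItemBkColor += delegate(int item, int subItem, ref Color color)
//   {
//       color = item % 2 == 0 ? Color.White : Color.WhiteSmoke;
//   };
//   Controls.Add(listView);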
/// <summary>
/// VirtualListView is a virtual Listview which allows a large number of items (rows)
/// to be displayed. The virtual Listview contains very little actual information -
/// mainly item selection and focus information.
/// </summary>
public class VirtualListView : ListView
{
// store the item count to prevent the call to SendMessage(LVM_GETITEMCOUNT)
private int _itemCount;
#region Display query callbacks
/// <summary>
/// Fire the QueryItemBkColor event which requests the background color for the passed Listview cell
/// </summary>
public event QueryItemBkColorHandler QueryItemBkColor;
/// <summary>
/// Fire the QueryItemText event which requests the text for the passed Listview cell.
/// </summary>
[Category("Data")]
public event QueryItemTextHandler QueryItemText;
/// <summary>
/// Fire the QueryItemImage event which requests the ImageIndex for the passed Listview item.
/// </summary>
[Category("Data")]
public event QueryItemImageHandler QueryItemImage;
/// <summary>
/// Fire the QueryItemIndent event which requests the indent for the passed Listview item.
/// </summary>
[Category("Data")]
public event QueryItemIndentHandler QueryItemIndent;
[Category("Data")]
public event QueryItemHandler QueryItem;
#endregion
#region Properties
/// <summary>
/// Gets or sets the virtual number of items to be displayed.
/// </summary>
[Category("Behavior")]
public int ItemCount
{
get
{
return _itemCount;
}
set
{
_itemCount = value;
// If the virtual item count is set before the handle is created
// then the image lists don't get loaded properly
if (!IsHandleCreated)
{
return;
}
SetVirtualItemCount();
}
}
/// <summary>
/// Gets or sets how list items are displayed.
/// Hides the base ListView.View property;
/// virtual Listviews only allow the Details or List views.
/// </summary>
public new View View
{
get
{
return base.View;
}
set
{
if (value == View.LargeIcon ||
value == View.SmallIcon)
{
throw new ArgumentException($"Icon views are invalid for virtual {nameof(ListView)}s", nameof(View));
}
base.View = value;
}
}
/// <summary>
/// Gets the required creation parameters when the control handle is created.
/// Use LVS_OWNERDATA to set this as a virtual Listview.
/// </summary>
protected override CreateParams CreateParams
{
get
{
var cp = base.CreateParams;
// LVS_OWNERDATA style must be set when the control is created
cp.Style |= (int)ListViewStyles.LVS_OWNERDATA;
return cp;
}
}
#endregion
[Browsable(false)]
[DesignerSerializationVisibilityAttribute(DesignerSerializationVisibility.Hidden)]
public int LineHeight { get; private set; }
[Browsable(false)]
[DesignerSerializationVisibilityAttribute(DesignerSerializationVisibility.Hidden)]
public int NumberOfVisibleRows
{
get
{
return Height / LineHeight;
}
}
#region Constructors
/// <summary>
/// Initializes a new instance of the <see cref="VirtualListView"/> class.
/// </summary>
public VirtualListView()
{
// virtual listviews must be Details or List view with no sorting
View = View.Details;
Sorting = SortOrder.None;
UseCustomBackground = true;
ptrlvhti = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(LvHitTestInfo)));
SetStyle(ControlStyles.OptimizedDoubleBuffer, true);
SetStyle(ControlStyles.Opaque, true);
SetStyle(ControlStyles.AllPaintingInWmPaint, true);
LineHeight = this.Font.Height + 5;
}
~VirtualListView()
{
Marshal.FreeHGlobal(ptrlvhti);
}
#endregion
#region Methods
/// <summary>
/// Set the state of the passed Listview item's index.
/// </summary>
/// <param name="index">Listview item's index.</param>
/// <param name="selected">Select the passed item?</param>
public void SelectItem(int index, bool selected)
{
var ptrItem = IntPtr.Zero;
try
{
// Determine whether selecting or unselecting.
uint select = selected ? (uint)ListViewCallBackMask.LVIS_SELECTED : 0;
// Fill in the LVITEM structure with state fields.
var stateItem = new LvItem
{
Mask = (uint)ListViewItemMask.LVIF_STATE,
Item = index,
SubItem = 0,
State = @select,
StateMask = (uint)ListViewCallBackMask.LVIS_SELECTED
};
// Copy the structure to unmanaged memory.
ptrItem = Marshal.AllocHGlobal(Marshal.SizeOf(stateItem.GetType()));
Marshal.StructureToPtr(stateItem, ptrItem, true);
// Send the message to the control window.
Win32.SendMessage(
this.Handle,
(int)ListViewMessages.LVM_SETITEMSTATE,
(IntPtr)index,
ptrItem);
}
catch (Exception ex)
{
System.Diagnostics.Trace.WriteLine($"VirtualListView.SetItemState error={ex.Message}");
// TODO: should this eat any exceptions?
throw;
}
finally
{
// Always release the unmanaged memory.
if (ptrItem != IntPtr.Zero)
{
Marshal.FreeHGlobal(ptrItem);
}
}
}
private void SetVirtualItemCount()
{
Win32.SendMessage(
this.Handle,
(int)ListViewMessages.LVM_SETITEMCOUNT,
(IntPtr)this._itemCount,
IntPtr.Zero);
}
protected void OnDispInfoNotice(ref Message m, bool useAnsi)
{
var info = (LvDispInfo)m.GetLParam(typeof(LvDispInfo));
if ((info.Item.Mask & (uint)ListViewItemMask.LVIF_TEXT) > 0)
{
if (QueryItemText != null)
{
string lvtext;
QueryItemText(info.Item.Item, info.Item.SubItem, out lvtext);
if (lvtext != null)
{
try
{
int maxIndex = Math.Min(info.Item.cchTextMax - 1, lvtext.Length);
var data = new char[maxIndex + 1];
// Copy at most maxIndex characters so text longer than the buffer is truncated instead of throwing.
lvtext.CopyTo(0, data, 0, Math.Min(lvtext.Length, maxIndex));
data[maxIndex] = '\0';
Marshal.Copy(data, 0, info.Item.PszText, data.Length);
}
catch (Exception e)
{
Debug.WriteLine($"Failed to copy text name from client: {e}", $"{nameof(VirtualListView)}.{nameof(OnDispInfoNotice)}");
}
}
}
}
if ((info.Item.Mask & (uint)ListViewItemMask.LVIF_IMAGE) > 0)
{
int imageIndex = 0;
if (QueryItemImage != null)
{
QueryItemImage(info.Item.Item, info.Item.SubItem, out imageIndex);
}
info.Item.Image = imageIndex;
Marshal.StructureToPtr(info, m.LParam, false);
}
if ((info.Item.Mask & (uint)ListViewItemMask.LVIF_INDENT) > 0)
{
int itemIndent = 0;
if (QueryItemIndent != null)
{
QueryItemIndent(info.Item.Item, out itemIndent);
}
info.Item.Indent = itemIndent;
Marshal.StructureToPtr(info, m.LParam, false);
}
m.Result = new IntPtr(0);
}
protected void OnCustomDrawNotice(ref Message m)
{
var cd = (NmLvCustomDraw)m.GetLParam(typeof(NmLvCustomDraw));
switch (cd.Nmcd.dwDrawStage)
{
case (int)CustomDrawDrawStageFlags.CDDS_ITEMPREPAINT:
case (int)CustomDrawDrawStageFlags.CDDS_PREPAINT:
m.Result = new IntPtr((int)CustomDrawReturnFlags.CDRF_NOTIFYSUBITEMDRAW);
break;
case (int)CustomDrawDrawStageFlags.CDDS_SUBITEMPREPAINT:
if (QueryItemBkColor != null)
{
var color = Color.FromArgb(cd.ClearTextBackground & 0xFF, (cd.ClearTextBackground >> 8) & 0xFF, (cd.ClearTextBackground >> 16) & 0xFF);
QueryItemBkColor(cd.Nmcd.dwItemSpec.ToInt32(), cd.SubItem, ref color);
cd.ClearTextBackground = (color.B << 16) | (color.G << 8) | color.R;
Marshal.StructureToPtr(cd, m.LParam, false);
}
m.Result = new IntPtr((int)CustomDrawReturnFlags.CDRF_DODEFAULT);
break;
}
}
/// <summary>
/// Event to be fired whenever the control scrolls
/// </summary>
public event ScrollEventHandler Scroll;
protected virtual void OnScroll(ScrollEventArgs e)
{
var handler = this.Scroll;
if (handler != null)
{
handler(this, e);
}
}
[DllImport("user32.dll", CharSet = CharSet.Auto)]
public static extern int GetScrollPos(IntPtr hWnd, Orientation nBar);
/// <summary>
/// Gets the Vertical Scroll position of the control.
/// </summary>
public int VScrollPos
{
get { return GetScrollPos(this.Handle, Orientation.Vertical); }
}
protected override void WndProc(ref Message m)
{
var messageProcessed = false;
switch (m.Msg)
{
case (int)WindowsMessage.WM_REFLECT + (int)WindowsMessage.WM_NOTIFY:
var nm1 = (NmHdr)m.GetLParam(typeof(NmHdr));
switch (nm1.Code)
{
case (int)Notices.NM_CUSTOMDRAW:
OnCustomDrawNotice(ref m);
messageProcessed = true;
if (QueryItemBkColor == null || !UseCustomBackground)
{
m.Result = (IntPtr)0;
}
break;
case (int)ListViewNotices.LVN_GETDISPINFOW:
OnDispInfoNotice(ref m, false);
messageProcessed = true;
break;
case (int)ListViewNotices.LVN_BEGINDRAG:
OnBeginItemDrag(MouseButtons.Left, ref m);
messageProcessed = true;
break;
case (int)ListViewNotices.LVN_BEGINRDRAG:
OnBeginItemDrag(MouseButtons.Right, ref m);
messageProcessed = true;
break;
}
break;
case (int)WindowsMessage.WM_SCROLL:
// http://stackoverflow.com/questions/1851620/handling-scroll-event-on-listview-in-c-sharp
OnScroll(new ScrollEventArgs((ScrollEventType)(m.WParam.ToInt32() & 0xffff), m.WParam.ToInt32()));
break;
case (int)WindowsMessage.WM_ERASEBKGND:
if (BlazingFast)
{
messageProcessed = true;
m.Result = new IntPtr(1);
}
break;
}
if (!messageProcessed)
{
try
{
base.WndProc(ref m);
}
catch (Exception ex)
{
Trace.WriteLine($"Message {m} caused an exception: {ex.Message}");
}
}
}
public bool BlazingFast { get; set; }
public bool UseCustomBackground { get; set; }
protected ListViewItem GetItem(int idx)
{
ListViewItem item = null;
if (QueryItem != null)
{
QueryItem(idx, out item);
}
if (item == null)
{
throw new ArgumentException($"cannot find item {idx} via {nameof(QueryItem)} event");
}
return item;
}
protected void OnBeginItemDrag(MouseButtons mouseButton, ref Message m)
{
var info = (NMLISTVIEW)m.GetLParam(typeof(NMLISTVIEW));
ListViewItem item = null;
if (QueryItem != null)
{
QueryItem(info.iItem, out item);
}
OnItemDrag(new ItemDragEventArgs(mouseButton, item));
}
protected override void OnHandleCreated(EventArgs e)
{
base.OnHandleCreated(e);
// ensure the value for ItemCount is sent to the control properly if the user set it
// before the handle was created
SetVirtualItemCount();
}
protected override void OnHandleDestroyed(EventArgs e)
{
// the ListView OnHandleDestroyed accesses the Items list for all selected items
ItemCount = 0;
base.OnHandleDestroyed(e);
}
#endregion
LvHitTestInfo lvhti = new LvHitTestInfo();
IntPtr ptrlvhti;
int selection = -1;
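/// <summary>
/// Performs an LVM_HITTEST at the given client coordinates and returns the index of the
/// item at that point, or -1 if no item is hit.
/// </summary>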
public int hitTest(int x, int y)
{
lvhti.Point.X = x;
lvhti.Point.Y = y;
Marshal.StructureToPtr(lvhti, ptrlvhti, true);
int z = (int)Win32.SendMessage(this.Handle, (int)ListViewMessages.LVM_HITTEST, (IntPtr)0, ptrlvhti);
Marshal.PtrToStructure(ptrlvhti, lvhti);
return z;
}
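/// <summary>
/// Scrolls the Listview so that the indicated item is visible.
/// </summary>
/// <param name="index">The index of the item to make visible.</param>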
public void ensureVisible(int index)
{
Win32.SendMessage(Handle, (int)ListViewMessages.LVM_ENSUREVISIBLE, (IntPtr)index, (IntPtr)1);
}
public void ensureVisible()
{
ensureVisible(selectedItem);
}
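/// <summary>
/// Clears the current selection and selects the indicated item.
/// </summary>
/// <param name="index">The index of the item to select.</param>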
public void setSelection(int index)
{
clearSelection();
selection = index;
SelectItem(selection, true);
}
public int selectedItem
{
get
{
if (SelectedIndices.Count == 0)
{
return -1;
}
else
{
return SelectedIndices[0];
}
}
set
{
setSelection(value);
}
}
public void clearSelection()
{
if (selection != -1)
{
SelectItem(selection, false);
}
selection = -1;
}
// Indicates that a select-all operation is in progress; change-event handlers can check this and defer work until it is false
public bool SelectAllInProgress { get; set; }
public void SelectAll()
{
this.BeginUpdate();
SelectAllInProgress = true;
for (var i = 0; i < _itemCount; i++)
{
if (i == _itemCount - 1)
{
SelectAllInProgress = false;
}
this.SelectItem(i, true);
}
this.EndUpdate();
}
public void DeselectAll()
{
this.BeginUpdate();
SelectAllInProgress = true;
for (var i = 0; i < _itemCount; i++)
{
if (i == _itemCount - 1)
{
SelectAllInProgress = false;
}
this.SelectItem(i, false);
}
this.EndUpdate();
}
protected override void OnKeyDown(KeyEventArgs e)
{
if (e.KeyCode == Keys.A && e.Control && !e.Alt && !e.Shift) // Select All
{
SelectAll();
}
base.OnKeyDown(e);
}
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using Avalonia.Controls;
using Xunit;
namespace Avalonia.Input.UnitTests
{
using Controls = Controls.Controls;
public class KeyboardNavigationTests_Arrows
{
[Fact]
public void Down_Continue_Returns_Down_Control_In_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button1" },
(current = new Button { Name = "Button2" }),
(next = new Button { Name = "Button3" }),
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Continue_Returns_First_Control_In_Down_Sibling_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button1" },
new Button { Name = "Button2" },
(current = new Button { Name = "Button3" }),
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
(next = new Button { Name = "Button4" }),
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Continue_Returns_Down_Sibling()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button1" },
new Button { Name = "Button2" },
(current = new Button { Name = "Button3" }),
}
},
(next = new Button { Name = "Button4" }),
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Continue_Returns_First_Control_In_Down_Uncle_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button1" },
new Button { Name = "Button2" },
(current = new Button { Name = "Button3" }),
}
},
},
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
(next = new Button { Name = "Button4" }),
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Continue_Returns_Child_Of_Top_Level()
{
Button next;
var top = new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
(next = new Button { Name = "Button1" }),
}
};
var result = KeyboardNavigationHandler.GetNext(top, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Continue_Wraps()
{
Button current;
Button next;
var top = new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
(next = new Button { Name = "Button1" }),
new Button { Name = "Button2" },
new Button { Name = "Button3" },
}
},
},
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
(current = new Button { Name = "Button6" }),
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Cycle_Returns_Down_Control_In_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Cycle,
Children = new Controls
{
new Button { Name = "Button1" },
(current = new Button { Name = "Button2" }),
(next = new Button { Name = "Button3" }),
}
},
new StackPanel
{
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Cycle_Wraps_To_First()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Cycle,
Children = new Controls
{
(next = new Button { Name = "Button1" }),
new Button { Name = "Button2" },
(current = new Button { Name = "Button3" }),
}
},
new StackPanel
{
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Contained_Returns_Down_Control_In_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
new Button { Name = "Button1" },
(current = new Button { Name = "Button2" }),
(next = new Button { Name = "Button3" }),
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Equal(next, result);
}
[Fact]
public void Down_Contained_Stops_At_End()
{
Button current;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
new Button { Name = "Button1" },
new Button { Name = "Button2" },
(current = new Button { Name = "Button3" }),
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Null(result);
}
[Fact]
public void Down_None_Does_Nothing()
{
Button current;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.None,
Children = new Controls
{
new Button { Name = "Button1" },
(current = new Button { Name = "Button2" }),
new Button { Name = "Button3" },
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Down);
Assert.Null(result);
}
[Fact]
public void Up_Continue_Returns_Up_Control_In_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button1" },
(next = new Button { Name = "Button2" }),
(current = new Button { Name = "Button3" }),
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(next, result);
}
[Fact]
public void Up_Continue_Returns_Last_Control_In_Up_Sibling_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button1" },
new Button { Name = "Button2" },
(next = new Button { Name = "Button3" }),
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
(current = new Button { Name = "Button4" }),
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(next, result);
}
[Fact]
public void Up_Continue_Returns_Last_Child_Of_Sibling()
{
Button current;
Button next;
var top = new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button1" },
new Button { Name = "Button2" },
(next = new Button { Name = "Button3" }),
}
},
(current = new Button { Name = "Button4" }),
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(next, result);
}
[Fact]
public void Up_Continue_Returns_Last_Control_In_Up_Nephew_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button1" },
new Button { Name = "Button2" },
(next = new Button { Name = "Button3" }),
}
},
},
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
(current = new Button { Name = "Button4" }),
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(next, result);
}
[Fact]
public void Up_Continue_Wraps()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
(current = new Button { Name = "Button1" }),
new Button { Name = "Button2" },
new Button { Name = "Button3" },
}
},
},
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
(next = new Button { Name = "Button6" }),
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(next, result);
}
[Fact]
public void Up_Continue_Returns_Parent()
{
Button current;
var top = new Decorator
{
Focusable = true,
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Continue,
Child = current = new Button
{
Name = "Button",
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(top, result);
}
[Fact]
public void Up_Cycle_Returns_Up_Control_In_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Cycle,
Children = new Controls
{
(next = new Button { Name = "Button1" }),
(current = new Button { Name = "Button2" }),
new Button { Name = "Button3" },
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Cycle,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(next, result);
}
[Fact]
public void Up_Cycle_Wraps_To_Last()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Cycle,
Children = new Controls
{
(current = new Button { Name = "Button1" }),
new Button { Name = "Button2" },
(next = new Button { Name = "Button3" }),
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Cycle,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(next, result);
}
[Fact]
public void Up_Contained_Returns_Up_Control_In_Container()
{
Button current;
Button next;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
(next = new Button { Name = "Button1" }),
(current = new Button { Name = "Button2" }),
new Button { Name = "Button3" },
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Equal(next, result);
}
[Fact]
public void Up_Contained_Stops_At_Beginning()
{
Button current;
var top = new StackPanel
{
Children = new Controls
{
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
(current = new Button { Name = "Button1" }),
new Button { Name = "Button2" },
new Button { Name = "Button3" },
}
},
new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
new Button { Name = "Button4" },
new Button { Name = "Button5" },
new Button { Name = "Button6" },
}
},
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Null(result);
}
[Fact]
public void Up_Contained_Doesnt_Return_Child_Control()
{
Decorator current;
var top = new StackPanel
{
[KeyboardNavigation.DirectionalNavigationProperty] = KeyboardNavigationMode.Contained,
Children = new Controls
{
(current = new Decorator
{
Focusable = true,
Child = new Button(),
})
}
};
var result = KeyboardNavigationHandler.GetNext(current, NavigationDirection.Up);
Assert.Null(result);
}
}
}
| |
using System;
using System.Collections;
using System.Data;
using System.Linq;
using PCSComMaterials.Plan.DS;
using PCSComProcurement.Purchase.DS;
using PCSComProduct.Items.DS;
using PCSComUtils.Common.BO;
using PCSComUtils.MasterSetup.DS;
using PCSComUtils.PCSExc;
using PCSComUtils.Common;
namespace PCSComProcurement.Purchase.BO
{
/// <summary>
/// Summary description for PurchaseOrderBO.
/// </summary>
public class PurchaseOrderBO
{
private const string THIS = "PCSComProcurement.Purchase.BO.PurchaseOrderBO";
const int INT_BEGIN_DATA_ROW = 3;
const int INDEX_CODE = 2;
const int INDEX_NAME = 3;
const int INDEX_REVISION = 4;
public object GetMasterVO(int pintPOMasterID)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
return dsMaster.GetObjectVO(pintPOMasterID);
}
public DataRow LoadObjectVO(int pintID)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
return dsMaster.LoadObjectVO(pintID);
}
public object GetPartyVO(int pintID)
{
MST_PartyDS dsMaster = new MST_PartyDS();
return dsMaster.GetObjectVO(pintID);
}
public DataSet ListDetailByMaster(int pintID)
{
PO_PurchaseOrderDetailDS dsDetail = new PO_PurchaseOrderDetailDS();
return dsDetail.List(pintID);
}
public object GetExchangeRate(int pintCurrencyID,DateTime pdtmOrderDate)
{
MST_ExchangeRateDS dsMST = new MST_ExchangeRateDS();
return dsMST.GetExchangeRate(pintCurrencyID,pdtmOrderDate);
}
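/// <summary>
/// Updates the purchase order master and its detail rows, re-pointing every non-deleted
/// detail row at the master record before saving.
/// </summary>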
public void UpdatePurchaseOrder(object pvoPOMaster,DataSet pdstDetail)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
dsMaster.Update(pvoPOMaster);
if(pdstDetail != null)
{
foreach (DataRow objRow in pdstDetail.Tables[0].Rows)
{
if(objRow.RowState == DataRowState.Deleted) continue;
objRow[PO_PurchaseOrderDetailTable.PURCHASEORDERMASTERID_FLD] = ((PO_PurchaseOrderMasterVO)pvoPOMaster).PurchaseOrderMasterID;
}
}
PO_PurchaseOrderDetailDS dsDetail = new PO_PurchaseOrderDetailDS();
dsDetail.UpdateDataSet(pdstDetail);
}
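/// <summary>
/// Adds a new purchase order master, assigns the returned master ID to the detail rows,
/// saves them, and returns the new master ID.
/// </summary>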
public int AddNewPurchaseOrder(object pvoPOMaster,DataSet pdstPODetail)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
int intPOMasterID = dsMaster.AddAndReturnID(pvoPOMaster);
foreach (DataRow objRow in pdstPODetail.Tables[0].Rows)
{
if(objRow.RowState == DataRowState.Deleted) continue;
objRow[PO_PurchaseOrderDetailTable.PURCHASEORDERMASTERID_FLD] = intPOMasterID;
}
PO_PurchaseOrderDetailDS dsDetail = new PO_PurchaseOrderDetailDS();
dsDetail.UpdateDataSet(pdstPODetail);
return intPOMasterID;
}
public DataTable ListUnitOfMeasure()
{
MST_UnitOfMeasureDS dsUnitOfMeasure = new MST_UnitOfMeasureDS();
return dsUnitOfMeasure.List().Tables[0];
}
public object GetUnitOfMeasure(int pintID)
{
MST_UnitOfMeasureDS dsUnitOfMeasure = new MST_UnitOfMeasureDS();
return dsUnitOfMeasure.GetObjectVO(pintID);
}
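/// <summary>
/// Deletes every detail row of the indicated purchase order and then the master record.
/// </summary>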
public void DeletePurchaseOrder(int pintMasterID)
{
PO_PurchaseOrderDetailDS dsDetail = new PO_PurchaseOrderDetailDS();
DataSet dstDetail = dsDetail.List(pintMasterID);
foreach(DataRow drow in dstDetail.Tables[0].Rows)
{
dsDetail.Delete(int.Parse(drow[PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD].ToString()));
}
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
dsMaster.Delete(pintMasterID);
}
public object GetProductVO(int pintID)
{
ITM_ProductDS dsProduct = new ITM_ProductDS();
return dsProduct.GetObjectVO(pintID);
}
/// <summary>
/// Checks that the combination of reference number and revision number is not already
/// used by another purchase order.
/// </summary>
/// <param name="pstrReferenceNo">The reference number to check.</param>
/// <param name="pstrRevision">The revision number to check.</param>
/// <param name="pintPOMasterID">The ID of the purchase order master being edited.</param>
/// <returns>True if the combination is not in use; otherwise false.</returns>
/// <author>Trada</author>
/// <date>Friday, September 22 2006</date>
public bool CheckReferenceAndRevisionNo(string pstrReferenceNo, string pstrRevision, int pintPOMasterID)
{
PO_PurchaseOrderMasterDS dsPurchaseOrderMaster = new PO_PurchaseOrderMasterDS();
DataSet dstCount = dsPurchaseOrderMaster.CheckReferenceAndRevisionNo(pstrReferenceNo, pstrRevision, pintPOMasterID);
if (dstCount.Tables[0].Rows.Count > 0)
{
if (int.Parse(dstCount.Tables[0].Rows[0][0].ToString()) > 0)
{
return false;
}
}
return true;
}
public object GetPurchaseOrderByCode(string pstrCode)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
return dsMaster.GetObjectVO(pstrCode);
}
public int IsValidateData(string pstrValue,string pstrTable,string pstrField,string pstrCodition)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
return dsMaster.IsValidateData(pstrValue,pstrTable,pstrField,pstrCodition);
}
public DataRow GetDataRow(string pstrKeyField,string pstrValue,string pstrTable,string pstrField,string pstrCodition)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
return dsMaster.GetDataRow(pstrKeyField,pstrValue,pstrTable,pstrField,pstrCodition);
}
public void Add(object pObjectDetail)
{
// TODO:
}
public void Delete(object pObjectVO)
{
// TODO:
}
public object GetObjectVO(int pintID, string VOclass)
{
// TODO:
return null;
}
public void Update(object pObjectDetail)
{
// TODO:
}
public void UpdateDataSet(DataSet dstData)
{
// TODO:
}
public DataRow GetVendorLocationByVendor(int pintPartyID)
{
return null;
}
public DataSet GetItemVendorReference(int pintPartyID, Hashtable pobjProductID)
{
return null;
}
/// <summary>
/// Adds a new purchase order and its delivery schedule for the case of converting a CPO to a new PO.
/// </summary>
/// <param name="pdstCPODetail">The CPO detail dataset.</param>
/// <param name="pobjMasterVO">The purchase order master value object.</param>
/// <param name="pdstDetail">The purchase order detail dataset.</param>
/// <author>TuanDM</author>
public int AddPOAndDelSchedule(System.Data.DataSet pdstCPODetail, object pobjMasterVO, System.Data.DataSet pdstDetail)
{
//Add Master and Get returning ID
PO_PurchaseOrderMasterDS dsPOMaster = new PO_PurchaseOrderMasterDS();
int intMasterID = dsPOMaster.AddAndReturnID(pobjMasterVO);
//Add detail
ArrayList arlDueDate = new ArrayList();
foreach (DataRow drowDetail in pdstDetail.Tables[0].Rows)
{
drowDetail[PO_PurchaseOrderDetailTable.PURCHASEORDERMASTERID_FLD] = intMasterID;
arlDueDate.Add(drowDetail[PO_DeliveryScheduleTable.SCHEDULEDATE_FLD].ToString());
}
PO_PurchaseOrderDetailDS dsPODetail = new PO_PurchaseOrderDetailDS();
dsPODetail.UpdateDataSet(pdstDetail);
//Get PODetail DataSet which includes PURCHASEORDERDETAILID_FLD
pdstDetail = dsPODetail.List(intMasterID);
//Create DeliverySchedule -- and add DeliverySchedule
PO_DeliveryScheduleVO voSchedule;
for (int i =0; i < pdstDetail.Tables[0].Rows.Count; i++)
{
voSchedule = new PO_DeliveryScheduleVO();
voSchedule.DeliveryLine = 1;
voSchedule.PurchaseOrderDetailID = int.Parse(pdstDetail.Tables[0].Rows[i][PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD].ToString());
voSchedule.ScheduleDate = DateTime.Parse(arlDueDate[i].ToString());
voSchedule.DeliveryQuantity = decimal.Parse(pdstDetail.Tables[0].Rows[i][PO_PurchaseOrderDetailTable.ORDERQUANTITY_FLD].ToString());
new PO_DeliveryScheduleDS().Add(voSchedule);
}
//Update CPODetail
new MTR_CPODS().SetPOMasterID(null, intMasterID);
//return ID
return intMasterID;
}
/// <summary>
/// Updates the purchase order and its delivery schedule for the case of converting a CPO to an existing PO.
/// </summary>
/// <param name="pdstCPODetail">The CPO detail dataset.</param>
/// <param name="pobjMasterVO">The purchase order master value object.</param>
/// <param name="pdstDetail">The purchase order detail dataset.</param>
/// <author>TuanDM</author>
public void UpdatePOAndDelSchedule(System.Data.DataSet pdstCPODetail, object pobjMasterVO, System.Data.DataSet pdstDetail)
{
//Update PO Master
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
dsMaster.Update(pobjMasterVO);
//Update PO Detail
ArrayList arlIndexs = new ArrayList();
ArrayList arlDueDate = new ArrayList();
int intinD = -1;
if(pdstDetail != null)
foreach (DataRow objRow in pdstDetail.Tables[0].Rows)
{
intinD++;
if(objRow.RowState == DataRowState.Deleted) continue;
objRow[PO_PurchaseOrderDetailTable.PURCHASEORDERMASTERID_FLD] = ((PO_PurchaseOrderMasterVO)pobjMasterVO).PurchaseOrderMasterID;
if (objRow[PO_DeliveryScheduleTable.SCHEDULEDATE_FLD].ToString() != string.Empty)
{
arlDueDate.Add(objRow[PO_DeliveryScheduleTable.SCHEDULEDATE_FLD]);
arlIndexs.Add(intinD);
}
}
PO_PurchaseOrderDetailDS dsDetail = new PO_PurchaseOrderDetailDS();
dsDetail.UpdateDataSet(pdstDetail);
//Update DeliverySchedule
pdstDetail = dsDetail.List(((PO_PurchaseOrderMasterVO) pobjMasterVO).PurchaseOrderMasterID);
//Create DeliverySchedule -- and add DeliverySchedule
for (int i =0; i < arlIndexs.Count; i++)
{
PO_DeliveryScheduleVO voSchedule = new PO_DeliveryScheduleVO();
voSchedule.DeliveryLine = 1;
voSchedule.PurchaseOrderDetailID = int.Parse(pdstDetail.Tables[0].Rows[int.Parse(arlIndexs[i].ToString())][PO_DeliveryScheduleTable.PURCHASEORDERDETAILID_FLD].ToString());
voSchedule.ScheduleDate = DateTime.Parse(arlDueDate[i].ToString());
voSchedule.DeliveryQuantity = decimal.Parse(pdstDetail.Tables[0].Rows[int.Parse(arlIndexs[i].ToString())][PO_PurchaseOrderDetailTable.ORDERQUANTITY_FLD].ToString());
new PO_DeliveryScheduleDS().Add(voSchedule);
}
//Update CPODetail
new MTR_CPODS().SetPOMasterID(null, ((PO_PurchaseOrderMasterVO) pobjMasterVO).PurchaseOrderMasterID);
}
public DataRow GetVendorContact(int pintPartyLocationID)
{
return null;
}
/// <summary>
/// Add new for case : Convert CPO to new PO
/// </summary>
/// <param name="pobjMasterVO"></param>
/// <param name="pdstDetail"></param>
/// <param name="pdstDelivery"></param>
///<author>TuanDM</author>
public int AddPOAndDelScheduleImmediate(object pobjMasterVO, System.Data.DataSet pdstDetail, DataSet pdstDelivery, ArrayList parlCPOIDs)
{
//Add Master and Get returning ID
PO_PurchaseOrderMasterDS dsPOMaster = new PO_PurchaseOrderMasterDS();
int intMasterID = dsPOMaster.AddAndReturnID(pobjMasterVO);
//Add detail
foreach (DataRow drowDetail in pdstDetail.Tables[0].Rows)
{
drowDetail[PO_PurchaseOrderDetailTable.PURCHASEORDERMASTERID_FLD] = intMasterID;
}
PO_PurchaseOrderDetailDS dsPODetail = new PO_PurchaseOrderDetailDS();
dsPODetail.UpdateDataSet(pdstDetail);
//Get PODetail DataSet which includes PURCHASEORDERDETAILID_FLD
pdstDetail = dsPODetail.ListToGetID(intMasterID);
//Create DeliverySchedule -- and add DeliverySchedule
int intPOLineID =0;
int i =0;
PO_DeliveryScheduleDS dsDelivery = new PO_DeliveryScheduleDS();
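// For each PO detail row, collect the CPO delivery rows for the same product and merge rows sharing a schedule date into a single delivery line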
while (i++ < pdstDetail.Tables[0].Rows.Count)
{
DataSet dstNewDelPO = pdstDelivery.Clone();
DataRow[] drowSameDelivery = pdstDelivery.Tables[0].Select(ITM_ProductTable.PRODUCTID_FLD + "='" + pdstDetail.Tables[0].Rows[i-1][ITM_ProductTable.PRODUCTID_FLD].ToString() + "'");
intPOLineID = (int) pdstDetail.Tables[0].Rows[i-1][PO_DeliveryScheduleTable.PURCHASEORDERDETAILID_FLD];
foreach (DataRow t in drowSameDelivery)
{
int k = GetIndexForDeliveryLine(dstNewDelPO, (DateTime) t[PO_DeliveryScheduleTable.SCHEDULEDATE_FLD]);
if (k > -1)
{
dstNewDelPO.Tables[0].Rows[k][PO_DeliveryScheduleTable.DELIVERYQUANTITY_FLD] = (decimal)dstNewDelPO.Tables[0].Rows[k][PO_DeliveryScheduleTable.DELIVERYQUANTITY_FLD] + (decimal) t[PO_DeliveryScheduleTable.DELIVERYQUANTITY_FLD];
t[PO_DeliveryScheduleTable.PURCHASEORDERDETAILID_FLD] = intPOLineID;
}
else
{
t[PO_DeliveryScheduleTable.PURCHASEORDERDETAILID_FLD] = intPOLineID;
t[PO_DeliveryScheduleTable.DELIVERYLINE_FLD] = dstNewDelPO.Tables[0].Rows.Count + 1;
dstNewDelPO.Tables[0].ImportRow(t);
}
}
dsDelivery.UpdateDataSet(dstNewDelPO);
}
//Update CPODetail
MTR_CPODS dsCPO = new MTR_CPODS();
if (parlCPOIDs.Count > 0)
{
if (Convert.ToInt32(parlCPOIDs[0]) > 0)
{
dsCPO.SetPOMasterID(parlCPOIDs, intMasterID);
}
else
{
dsCPO.SetPOMasterIDForDCPDetail(parlCPOIDs, intMasterID);
}
}
return intMasterID;
}
/// <summary>
/// Add new for case : Convert CPO to new PO
/// </summary>
/// <param name="pobjMasterVO"></param>
/// <param name="pdstDetail"></param>
/// <param name="pdstDelivery"></param>
///<author>TuanDM</author>
public void UpdatePOAndDelScheduleImmediate(object pobjMasterVO, System.Data.DataSet pdstDetail, DataSet pdstDelivery, ArrayList parlCPOIDs)
{
//Add Master and Get returning ID
PO_PurchaseOrderMasterDS dsPOMaster = new PO_PurchaseOrderMasterDS();
dsPOMaster.Update(pobjMasterVO);
var voMaster = (PO_PurchaseOrderMasterVO) pobjMasterVO;
//Update detail
DataSet dstPODetail = pdstDetail.Copy();
PO_PurchaseOrderDetailDS dsPODetail = new PO_PurchaseOrderDetailDS();
foreach (DataRow drowDetail in pdstDetail.Tables[0].Rows)
{
if (drowDetail.RowState == DataRowState.Added)
{
drowDetail[PO_PurchaseOrderDetailTable.PURCHASEORDERMASTERID_FLD] = ((PO_PurchaseOrderMasterVO) pobjMasterVO).PurchaseOrderMasterID;
}
}
dsPODetail.UpdateDataSet(pdstDetail);
//Get PODetail DataSet which includes PURCHASEORDERDETAILID_FLD
pdstDetail = dsPODetail.ListToGetID(((PO_PurchaseOrderMasterVO) pobjMasterVO).PurchaseOrderMasterID);
//Create DeliverySchedule -- and add DeliverySchedule
int intPOLineID =0;
PO_DeliveryScheduleDS dsDelivery = new PO_DeliveryScheduleDS();
for (int i =0; i <dstPODetail.Tables[0].Rows.Count; i++)
{
DataRow[] drowSameDelivery = pdstDelivery.Tables[0].Select(ITM_ProductTable.PRODUCTID_FLD + "='" + dstPODetail.Tables[0].Rows[i][ITM_ProductTable.PRODUCTID_FLD].ToString() + "'");
intPOLineID = (int) pdstDetail.Tables[0].Select(PO_PurchaseOrderDetailTable.LINE_FLD + "='" + dstPODetail.Tables[0].Rows[i][PO_PurchaseOrderDetailTable.LINE_FLD].ToString() + "'")[0][PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD];
DataSet dstNewDelPO = dsDelivery.GetDeliverySchedule(intPOLineID);
int intbaseCount = dstNewDelPO.Tables[0].Rows.Count;
for (int j =0; j <drowSameDelivery.Length; j++)
{
int k = GetIndexForDeliveryLine(dstNewDelPO, (DateTime) drowSameDelivery[j][PO_DeliveryScheduleTable.SCHEDULEDATE_FLD]);
if (k > -1)
{
dstNewDelPO.Tables[0].Rows[k][PO_DeliveryScheduleTable.DELIVERYQUANTITY_FLD] = (decimal)dstNewDelPO.Tables[0].Rows[k][PO_DeliveryScheduleTable.DELIVERYQUANTITY_FLD] + (decimal) drowSameDelivery[j][PO_DeliveryScheduleTable.DELIVERYQUANTITY_FLD];
}
else
{
drowSameDelivery[j][PO_DeliveryScheduleTable.PURCHASEORDERDETAILID_FLD] = intPOLineID;
drowSameDelivery[j][PO_DeliveryScheduleTable.DELIVERYLINE_FLD] = j+1 + intbaseCount;
dstNewDelPO.Tables[0].ImportRow(drowSameDelivery[j]);
}
}
dsDelivery.UpdateDataSet(dstNewDelPO);
}
//Update CPODetail
MTR_CPODS dsCPO = new MTR_CPODS();
dsCPO.SetPOMasterID(parlCPOIDs, ((PO_PurchaseOrderMasterVO) pobjMasterVO).PurchaseOrderMasterID);
}
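/// <summary>
/// Returns the index of the delivery row whose schedule date matches the given due date, or -1 if no row matches.
/// </summary>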
private int GetIndexForDeliveryLine(DataSet pdstData, DateTime dtmDuedate)
{
int i =0;
foreach (DataRow drowData in pdstData.Tables[0].Rows)
{
if ( (DateTime)drowData[PO_DeliveryScheduleTable.SCHEDULEDATE_FLD] == dtmDuedate)
break;
i += 1;
}
if (i == pdstData.Tables[0].Rows.Count) i = -1;
return i;
}
#region Import functions
public int ImportNewMappingData(DataTable dtImpData, int intPartyID, int intCCNID, int intMaxLine, DataSet dstMappingData)
{
int intResult = 0;
int intMaxID = 0;
dstMappingData.Tables[0].DefaultView.Sort = PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD;
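// Determine the current maximum detail ID; falls back to 0 when the mapping table is empty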
try
{
intMaxID = int.Parse(dstMappingData.Tables[0].Rows[dstMappingData.Tables[0].Rows.Count - 1][PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD].ToString());
}
catch
{
intMaxID = 0;
}
dstMappingData.Tables[0].DefaultView.Sort = string.Empty;
ITM_ProductDS dsProduct = new ITM_ProductDS();
for (int i = INT_BEGIN_DATA_ROW; i < dtImpData.Rows.Count; i++)
{
string strItemCode = dtImpData.Rows[i][INDEX_CODE].ToString();
string strDescription = dtImpData.Rows[i][INDEX_NAME].ToString();
string strRevision = dtImpData.Rows[i][INDEX_REVISION].ToString();
//find out total quantity at last column
decimal dcmOrderQty = decimal.Parse(dtImpData.Rows[i][dtImpData.Columns.Count - 1].ToString());
//check if this item existed, update quantity only
DataRow[] arrRows = dstMappingData.Tables[0].Select(ITM_ProductTable.CODE_FLD + "='" + strItemCode + "'");
if (arrRows.Length > 0)
{
arrRows[0][PO_PurchaseOrderDetailTable.ORDERQUANTITY_FLD] = decimal.Parse(arrRows[0][PO_PurchaseOrderDetailTable.ORDERQUANTITY_FLD].ToString()) + dcmOrderQty;
continue;
}
if (dcmOrderQty <= 0)
continue;
ITM_ProductVO voProduct = (ITM_ProductVO)dsProduct.GetObjectVO(strItemCode, strDescription, strRevision);
//New row
DataRow dr = dstMappingData.Tables[0].NewRow();
UtilsBO boUtils = new UtilsBO();
//fill row
dr[PO_PurchaseOrderDetailTable.ORDERQUANTITY_FLD] = dcmOrderQty;
dr[PO_PurchaseOrderDetailTable.PRODUCTID_FLD] = voProduct.ProductID;
dr[ITM_ProductTable.CODE_FLD] = voProduct.Code;
dr[ITM_ProductTable.DESCRIPTION_FLD] = voProduct.Description;
dr[ITM_ProductTable.REVISION_FLD] = voProduct.Revision;
dr[PO_PurchaseOrderDetailTable.BUYINGUMID_FLD] = voProduct.BuyingUMID;
dr[PO_PurchaseOrderDetailTable.UNITPRICE_FLD] = voProduct.ListPrice;
dr[PO_PurchaseOrderDetailTable.TOTALAMOUNT_FLD] = voProduct.ListPrice * dcmOrderQty;
dr[PO_PurchaseOrderDetailTable.UMRATE_FLD] = boUtils.GetUMRate(voProduct.StockUMID, voProduct.BuyingUMID);
dr[PO_PurchaseOrderDetailTable.LINE_FLD] = ++intMaxLine;
dr[PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD] = ++intMaxID;
dr[PO_PurchaseOrderDetailTable.STOCKUMID_FLD] = voProduct.StockUMID;
dstMappingData.Tables[0].Rows.Add(dr);
}
return intResult;
}
public int ImportNewPurchaseOrder(object pvoPOMaster, DataSet pdstDetail)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
int intPOMasterID = dsMaster.AddAndReturnID(pvoPOMaster);
if(pdstDetail.Tables.Count > 0)
{
foreach (DataRow objRow in pdstDetail.Tables[0].Rows)
{
if(objRow.RowState == DataRowState.Deleted)
continue;
objRow[PO_PurchaseOrderDetailTable.PURCHASEORDERMASTERID_FLD] = intPOMasterID;
}
PO_PurchaseOrderDetailDS dsDetail = new PO_PurchaseOrderDetailDS();
dsDetail.UpdateDataSetForImport(pdstDetail, intPOMasterID);
}
return intPOMasterID;
}
public DataSet ListScheduleForImport(int pintMasterID)
{
PO_DeliveryScheduleDS dsDelivery = new PO_DeliveryScheduleDS();
return dsDelivery.ListForImport(pintMasterID);
}
public void UpdateScheduleForImport(DataSet pdstData)
{
PO_DeliveryScheduleDS dsDelivery = new PO_DeliveryScheduleDS();
dsDelivery.UpdateDataSet(pdstData);
}
public void UpdateDeletedRowInDataSet(DataSet pdstDelSchData, int pintPOMasterID)
{
PO_DeliveryScheduleDS dsDelSch = new PO_DeliveryScheduleDS();
dsDelSch.UpdateDeletedRowInDataSet(pdstDelSchData, pintPOMasterID);
}
public int ImportUpdateMappingData(DataTable dtImpData, int intPartyID, int intCCNID, int intMaxLine, DataSet dstMappingData)
{
int intResult = 0;
const string TEMP_QTY_COL_NAME = "TempQty";
//Add new column for temp qty
DataColumn objCol = new DataColumn(TEMP_QTY_COL_NAME);
objCol.DataType = typeof(Decimal);
objCol.DefaultValue = 0;
dstMappingData.Tables[0].Columns.Add(objCol);
int intMaxID = 0;
dstMappingData.Tables[0].DefaultView.Sort = PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD;
try
{
intMaxID = int.Parse(dstMappingData.Tables[0].Rows[dstMappingData.Tables[0].Rows.Count - 1][PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD].ToString());
}
catch
{
intMaxID = 0;
}
dstMappingData.Tables[0].DefaultView.Sort = string.Empty;
//walk through data
ITM_ProductDS dsProduct = new ITM_ProductDS();
for (int i = INT_BEGIN_DATA_ROW; i < dtImpData.Rows.Count; i++)
{
//findout Item Code
string strItemCode = dtImpData.Rows[i][INDEX_CODE].ToString();
string strDescription = dtImpData.Rows[i][INDEX_NAME].ToString();
string strRevision = dtImpData.Rows[i][INDEX_REVISION].ToString();
//find out total quantity at last column
decimal dcmOrderQty = decimal.Parse(dtImpData.Rows[i][dtImpData.Columns.Count - 1].ToString());
//check if this item existed, update quantity only
DataRow[] arrRows = dstMappingData.Tables[0].Select(ITM_ProductTable.CODE_FLD + "='" + strItemCode + "'");
if (arrRows.Length > 0)
{
arrRows[0][TEMP_QTY_COL_NAME] = decimal.Parse(arrRows[0][TEMP_QTY_COL_NAME].ToString()) + dcmOrderQty;
continue;
}
if (dcmOrderQty <= 0)
continue;
ITM_ProductVO voProduct = (ITM_ProductVO)dsProduct.GetObjectVO(strItemCode, strDescription, strRevision);
//New row
DataRow dr = dstMappingData.Tables[0].NewRow();
UtilsBO boUtils = new UtilsBO();
//fill row
dr[TEMP_QTY_COL_NAME] = dcmOrderQty;
dr[PO_PurchaseOrderDetailTable.ORDERQUANTITY_FLD] = dcmOrderQty;
dr[PO_PurchaseOrderDetailTable.PRODUCTID_FLD] = voProduct.ProductID;
dr[ITM_ProductTable.CODE_FLD] = voProduct.Code;
dr[ITM_ProductTable.DESCRIPTION_FLD] = voProduct.Description;
dr[ITM_ProductTable.REVISION_FLD] = voProduct.Revision;
dr[PO_PurchaseOrderDetailTable.BUYINGUMID_FLD] = voProduct.BuyingUMID;
dr[PO_PurchaseOrderDetailTable.UNITPRICE_FLD] = voProduct.ListPrice;
dr[PO_PurchaseOrderDetailTable.TOTALAMOUNT_FLD] = voProduct.ListPrice * dcmOrderQty;
dr[PO_PurchaseOrderDetailTable.UMRATE_FLD] = boUtils.GetUMRate(voProduct.StockUMID, voProduct.BuyingUMID);
dr[PO_PurchaseOrderDetailTable.LINE_FLD] = ++intMaxLine;
dr[PO_PurchaseOrderDetailTable.PURCHASEORDERDETAILID_FLD] = ++intMaxID;
dr[PO_PurchaseOrderDetailTable.STOCKUMID_FLD] = voProduct.StockUMID;
dr[TEMP_QTY_COL_NAME] = dcmOrderQty;
dstMappingData.Tables[0].Rows.Add(dr);
}
if (intResult != 0)
{
dstMappingData.Tables[0].Columns.Remove(objCol);
return intResult;
}
//refine data, with correct line
int intLine = 1;
for (int i = 0; i < dstMappingData.Tables[0].Rows.Count; i++)
{
if (int.Parse(dstMappingData.Tables[0].Rows[i][TEMP_QTY_COL_NAME].ToString()) == 0)
dstMappingData.Tables[0].Rows[i][PO_PurchaseOrderDetailTable.LINE_FLD] = -1;
else
{
//Update Line
dstMappingData.Tables[0].Rows[i][PO_PurchaseOrderDetailTable.LINE_FLD] = intLine;
//Update quantity
dstMappingData.Tables[0].Rows[i][PO_PurchaseOrderDetailTable.ORDERQUANTITY_FLD] = dstMappingData.Tables[0].Rows[i][TEMP_QTY_COL_NAME];
intLine++;
}
}
dstMappingData.Tables[0].Columns.Remove(objCol);
return intResult;
}
public DataTable GetRemainQuantity(int pintMasterID)
{
return null;
}
public void UpdateInsertedRowInDataSet(DataSet pdstDelSchData, int pintMasterID)
{
PO_DeliveryScheduleDS dsSchedule = new PO_DeliveryScheduleDS();
dsSchedule.UpdateInsertedRowInDataSet(pdstDelSchData, pintMasterID);
}
public void ImportUpdatePurchaseOrder(int pintPOMasterID, DataSet pdstDetail, ref int pintErrorLine)
{
const string TEMP_QTY_COL_NAME = "TempQty";
const string METHOD_NAME = THIS + ".ImportUpdatePurchaseOrder()";
const string SUMCOMMITQUANTITY_FLD = "SUMCommitQuantity";
if(pdstDetail.Tables.Count > 0)
{
foreach (DataRow drowDetail in pdstDetail.Tables[0].Rows)
{
if(drowDetail.RowState == DataRowState.Deleted)
continue;
if (int.Parse(drowDetail[PO_PurchaseOrderDetailTable.LINE_FLD].ToString()) == -1)
drowDetail.Delete();
else
drowDetail[PO_PurchaseOrderDetailTable.PURCHASEORDERMASTERID_FLD] = pintPOMasterID;
}
PO_PurchaseOrderDetailDS dsDetail = new PO_PurchaseOrderDetailDS();
//update purchase order detail dataset
dsDetail.UpdateDataSetForImport(pdstDetail,pintPOMasterID);
}
pintErrorLine = -1;
}
#endregion
#region Delete estimate purchase order function
/// <summary>
/// Deletes estimate purchase orders within the given date range
/// </summary>
/// <param name="pdtmFromDate">From Date</param>
/// <param name="pdtmToDate">To Date</param>
/// <param name="pintPOType">Purchase Type</param>
/// <param name="pstrVendorID">Vendor List</param>
/// <param name="pstrItemID">Item list</param>
public void DeleteEstimatePO(DateTime pdtmFromDate, DateTime pdtmToDate, int pintPOType, string pstrVendorID, string pstrItemID)
{
PO_PurchaseOrderMasterDS dsMaster = new PO_PurchaseOrderMasterDS();
// get list of purchase order master IDs to be deleted
DataTable dtbMaster = dsMaster.ListMasterToDelete(pdtmFromDate, pdtmToDate, pintPOType, pstrVendorID, pstrItemID).Tables[0];
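// Build a comma-separated list of master IDs, seeded with "0" so the list is never empty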
string strMasterId = dtbMaster.Rows.Cast<DataRow>().Aggregate("0", (current, drowMaster) => current + ("," + drowMaster[PO_PurchaseOrderMasterTable.PURCHASEORDERMASTERID_FLD]));
// delete delivery schedule first
PO_DeliveryScheduleDS dsSchedule = new PO_DeliveryScheduleDS();
dsSchedule.DeleteByPOMaster(strMasterId);
// delete purchase order detail
PO_PurchaseOrderDetailDS dsDetail = new PO_PurchaseOrderDetailDS();
dsDetail.DeleteByMaster(strMasterId);
// delete purchase order master
dsMaster.Delete(strMasterId);
}
#endregion
}
}
| |
/*
* Copyright (c) InWorldz Halcyon Developers
* Copyright (c) Contributors, http://opensimulator.org/
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSim Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Reflection;
using log4net;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
namespace OpenSim.Region.CoreModules.Agent.TextureSender
{
/// <summary>
/// A TextureSender handles the process of receiving a texture requested by the client from the
/// AssetCache, and then sending that texture back to the client.
/// </summary>
public class TextureSender : ITextureSender
{
private static readonly ILog m_log
= LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
/// <summary>
/// Records the number of times texture send has been called.
/// </summary>
public int counter = 0;
public bool ImageLoaded = false;
/// <summary>
/// Holds the texture asset to send.
/// </summary>
private AssetBase m_asset;
//public UUID assetID { get { return m_asset.FullID; } }
// private bool m_cancel = false;
// See ITextureSender
// private bool m_sending = false;
/// <summary>
/// This is actually the number of extra packets required to send the texture data! We always assume
/// at least one is required.
/// </summary>
private int NumPackets = 0;
/// <summary>
/// Holds the packet number to send next. In this case, each packet is 1000 bytes long and starts
/// at the 600th byte (0th indexed).
/// </summary>
private int PacketCounter = 0;
private int RequestedDiscardLevel = -1;
private IClientAPI RequestUser;
private uint StartPacketNumber = 0;
public TextureSender(IClientAPI client, int discardLevel, uint packetNumber)
{
RequestUser = client;
RequestedDiscardLevel = discardLevel;
StartPacketNumber = packetNumber;
}
#region ITextureSender Members
public bool Cancel
{
get { return false; }
set
{
// m_cancel = value;
}
}
public bool Sending
{
get { return false; }
set
{
// m_sending = value;
}
}
// See ITextureSender
public void UpdateRequest(int discardLevel, uint packetNumber)
{
RequestedDiscardLevel = discardLevel;
StartPacketNumber = packetNumber;
PacketCounter = (int)StartPacketNumber;
}
// See ITextureSender
public bool SendTexturePacket()
{
//m_log.DebugFormat("[TEXTURE SENDER]: Sending packet for {0}", m_asset.FullID);
SendPacket();
counter++;
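// Done when the texture fits in a single packet, no discard level was requested, all packets have been sent,
// or the packet budget for the requested discard level has been exhausted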
if ((NumPackets == 0) || (RequestedDiscardLevel == -1) || (PacketCounter > NumPackets) ||
((RequestedDiscardLevel > 0) && (counter > 50 + (NumPackets / (RequestedDiscardLevel + 1)))))
{
return true;
}
return false;
}
#endregion
/// <summary>
/// Load up the texture data to send.
/// </summary>
/// <param name="asset"></param>
public void TextureReceived(AssetBase asset)
{
m_asset = asset;
NumPackets = CalculateNumPackets(asset.Data.Length);
PacketCounter = (int)StartPacketNumber;
ImageLoaded = true;
}
/// <summary>
/// Sends a texture packet to the client.
/// </summary>
private void SendPacket()
{
if (PacketCounter <= NumPackets)
{
if (PacketCounter == 0)
{
if (NumPackets == 0)
{
RequestUser.SendImageFirstPart(1, m_asset.FullID, (uint)m_asset.Data.Length, m_asset.Data, 2);
PacketCounter++;
}
else
{
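// The first packet carries only the initial 600 bytes; the remainder follows via SendImageNextPart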
byte[] ImageData1 = new byte[600];
Array.Copy(m_asset.Data, 0, ImageData1, 0, 600);
RequestUser.SendImageFirstPart(
(ushort)(NumPackets), m_asset.FullID, (uint)m_asset.Data.Length, ImageData1, 2);
PacketCounter++;
}
}
else
{
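// Subsequent packets each carry up to 1000 bytes, offset past the 600-byte first chunk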
int size = m_asset.Data.Length - 600 - (1000 * (PacketCounter - 1));
if (size > 1000) size = 1000;
byte[] imageData = new byte[size];
try
{
Array.Copy(m_asset.Data, 600 + (1000 * (PacketCounter - 1)), imageData, 0, size);
}
catch (ArgumentOutOfRangeException)
{
m_log.Error("[TEXTURE SENDER]: Unable to separate texture into multiple packets: Array bounds failure on asset:" +
m_asset.ID);
return;
}
RequestUser.SendImageNextPart((ushort)PacketCounter, m_asset.FullID, imageData);
PacketCounter++;
}
}
}
/// <summary>
/// Calculate the number of packets that will be required to send the texture loaded into this sender
/// This is actually the number of 1000 byte packets not including an initial 600 byte packet...
/// </summary>
/// <param name="length"></param>
/// <returns></returns>
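/// <remarks>For example, a 2600-byte asset needs (2600 - 600 + 999) / 1000 = 2 additional packets; anything up to 600 bytes needs none.</remarks>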
private int CalculateNumPackets(int length)
{
int numPackets = 0;
if (length > 600)
{
//over 600 bytes so split up file
int restData = (length - 600);
int restPackets = ((restData + 999) / 1000);
numPackets = restPackets;
}
return numPackets;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using JetBrains.Annotations;
using JoinRpg.DataModel;
using JoinRpg.DataModel.Finances;
namespace JoinRpg.Domain
{
public static class FinanceExtensions
{
/// <summary>
/// Returns project fee for a specified date for claim
/// </summary>
private static int ProjectFeeForDate(this Claim claim, DateTime? operationDate)
{
var projectFeeInfo = claim.Project.ProjectFeeInfo(operationDate ?? DateTime.UtcNow);
return (claim.PreferentialFeeUser
? projectFeeInfo?.PreferentialFee
: projectFeeInfo?.Fee) ?? 0;
}
/// <summary>
/// Returns fee info object for a specified date
/// </summary>
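/// <remarks>Picks the fee setting with the latest start date that is not after the given date.</remarks>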
private static ProjectFeeSetting? ProjectFeeInfo(this Project project,
DateTime operationDate)
=> project.ProjectFeeSettings.Where(pfs => pfs.StartDate.Date <= operationDate.Date)
.OrderByDescending(pfs => pfs.StartDate.Date).FirstOrDefault();
/// <summary>
/// Returns fee info object for today
/// </summary>
public static ProjectFeeSetting? ProjectFeeInfo(this Project project)
=> project.ProjectFeeInfo(DateTime.UtcNow);
/// <summary>
/// Returns total sum of claim fee and all finance operations
/// </summary>
private static int ClaimTotalFee(this Claim claim, DateTime operationDate, int? fieldsFee)
=> claim.ClaimCurrentFee(operationDate, fieldsFee)
+ claim.ApprovedFinanceOperations.Sum(fo => fo.FeeChange);
/// <summary>
/// Returns total sum of claim fee and all finance operations using current date
/// </summary>
public static int ClaimTotalFee(this Claim claim, int? fieldsFee = null)
=> claim.ClaimTotalFee(DateTime.UtcNow, fieldsFee);
/// <summary>
/// Returns base fee (taken from project settings or claim's property CurrentFee)
/// </summary>
public static int BaseFee(this Claim claim, DateTime? operationDate = null)
=> claim.CurrentFee ?? claim.ProjectFeeForDate(operationDate);
/// <summary>
/// Returns actual fee for a claim (as a sum of claim fee and fields fee) using current date
/// </summary>
public static int ClaimCurrentFee(this Claim claim, int? fieldsFee)
=> claim.ClaimCurrentFee(DateTime.UtcNow, fieldsFee);
/// <summary>
/// Returns actual fee for a claim (as a sum of claim fee and fields fee)
/// </summary>
private static int ClaimCurrentFee(this Claim claim, DateTime operationDate, int? fieldsFee)
{
return claim.BaseFee(operationDate)
+ claim.ClaimFieldsFee(fieldsFee)
+ claim.ClaimAccommodationFee();
/******************************************************************
* If you want to add additional fee to a claim's fee,
* append your value to the expression above.
* Example:
* return claim.BaseFee(operationDate)
* + claim.ClaimFieldsFee(fieldsFee)
* + claim.ClaimAccommodationFee()
* + claim.SomeOtherBigFee();
*****************************************************************/
}
/// <summary>
/// Returns claim payment status from total fee and money balance
/// </summary>
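/// <remarks>For example, with a total fee of 100 the result is MoreToPay for a balance of 40, Paid for 100, Overpaid for 120, and NotPaid for 0.</remarks>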
public static ClaimPaymentStatus GetClaimPaymentStatus(int totalFee, int balance)
{
if (totalFee < balance)
{
return ClaimPaymentStatus.Overpaid;
}
else if (totalFee == balance)
{
return ClaimPaymentStatus.Paid;
}
else if (balance > 0)
{
return ClaimPaymentStatus.MoreToPay;
}
else
{
return ClaimPaymentStatus.NotPaid;
}
}
/// <summary>
/// Returns claim payment status from claim' data
/// </summary>
public static ClaimPaymentStatus PaymentStatus(this Claim claim)
=> GetClaimPaymentStatus(claim.ClaimTotalFee(), claim.ClaimBalance());
/// <summary>
/// Returns total sum of all money flow operations
/// </summary>
public static int GetPaymentSum(this Claim claim)
=> claim.FinanceOperations
.Where(fo => fo.Approved && fo.MoneyFlowOperation)
.Sum(fo => fo.MoneyAmount);
/// <summary>
/// Returns current fee of a field with value
/// </summary>
public static int GetCurrentFee(this FieldWithValue self)
{
if (!self.Field.FieldType.SupportsPricing())
{
return 0;
}
return self.Field.FieldType switch
{
ProjectFieldType.Checkbox => self.HasEditableValue ? self.Field.Price : 0,
ProjectFieldType.Number => self.ToInt() * self.Field.Price,
ProjectFieldType.Dropdown => self.GetDropdownValues().Sum(v => v.Price),
ProjectFieldType.MultiSelect => self.GetDropdownValues().Sum(v => v.Price),
_ => throw new NotSupportedException("Can't calculate pricing"),
};
}
/// <summary>
/// Calculates total fields fee
/// </summary>
private static int CalcClaimFieldsFee(this Claim claim)
{
var values = claim.Project.GetFieldsNotFilledWithoutOrder()
.ToList()
.FillIfEnabled(claim, claim.IsApproved ? claim.Character : null);
return values.Sum(f => f.GetCurrentFee());
}
/// <summary>
/// Returns actual total claim fields fee
/// </summary>
private static int ClaimFieldsFee(this Claim claim, int? fieldsFee)
{
if (fieldsFee == null)
{
fieldsFee = claim.FieldsFee ?? claim.CalcClaimFieldsFee();
}
// cache
claim.FieldsFee = fieldsFee;
return fieldsFee ?? 0;
}
/// <summary>
/// Returns accommodation fee
/// </summary>
public static int ClaimAccommodationFee(this Claim claim)
=> claim.AccommodationRequest?.AccommodationType?.Cost ?? 0;
/// <summary>
/// Returns how many money left to pay
/// </summary>
public static int ClaimFeeDue(this Claim claim)
=> claim.ClaimTotalFee() - claim.ClaimBalance();
/// <summary>
/// Returns sum of all approved finance operations
/// </summary>
public static int ClaimBalance(this Claim claim)
=> claim.ApprovedFinanceOperations.Sum(fo => fo.MoneyAmount);
/// <summary>
/// Returns sum of all unapproved finance operations
/// </summary>
public static int ClaimProposedBalance(this Claim claim)
=> claim.FinanceOperations.Sum(fo =>
fo.State == FinanceOperationState.Proposed ? fo.MoneyAmount : 0);
public static void RequestModerationAccess(this FinanceOperation finance, int currentUserId)
{
if (!finance.Claim.HasAccess(currentUserId,
acl => acl.CanManageMoney) &&
finance.PaymentType?.UserId != currentUserId)
{
throw new NoAccessToProjectException(finance, currentUserId);
}
}
public static bool ClaimPaidInFull(this Claim claim)
=> claim.ClaimBalance() >= claim.ClaimTotalFee();
private static bool ClaimPaidInFull(this Claim claim, DateTime operationDate)
=> claim.ClaimBalance() >= claim.ClaimTotalFee(operationDate.AddDays(-1), null);
public static void UpdateClaimFeeIfRequired(this Claim claim, DateTime operationDate)
{
if (claim.Project.ProjectFeeSettings.Any() //If project has fee
&& claim.CurrentFee == null //and fee not already fixed for claim
&& claim.ClaimPaidInFull(operationDate) //and current fee is payed in full
)
{
claim.CurrentFee = claim.ProjectFeeForDate(operationDate); //fix fee for claim
}
}
[CanBeNull]
public static PaymentType? GetCashPaymentType([NotNull]
this Project project,
int userId)
{
if (project == null)
{
throw new ArgumentNullException(nameof(project));
}
return project.PaymentTypes.SingleOrDefault(pt => pt.UserId == userId && pt.TypeKind == PaymentTypeKind.Cash);
}
public static bool CanAcceptCash([NotNull]
this Project project,
[NotNull]
User user)
{
if (project == null)
{
throw new ArgumentNullException(nameof(project));
}
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return GetCashPaymentType(project, user.UserId)?.IsActive ?? false;
}
public static IEnumerable<MoneyTransfer> Approved(
this IEnumerable<MoneyTransfer> transfers)
=> transfers.Where(mt => mt.ResultState == MoneyTransferState.Approved);
public static IEnumerable<MoneyTransfer> SendedByMaster(
this IEnumerable<MoneyTransfer> transfers,
User master) => transfers.Where(mt => mt.SenderId == master.UserId);
public static IEnumerable<MoneyTransfer> ReceivedByMaster(
this IEnumerable<MoneyTransfer> transfers,
User master) => transfers.Where(mt => mt.ReceiverId == master.UserId);
public static int SendedByMasterSum(this IReadOnlyCollection<MoneyTransfer> transfers,
User master) => transfers.Approved().SendedByMaster(master).Sum(mt => -mt.Amount);
public static int ReceivedByMasterSum(this IReadOnlyCollection<MoneyTransfer> transfers,
User master) => transfers.Approved().ReceivedByMaster(master).Sum(mt => mt.Amount);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
namespace ParquetColumnTests.Io
{
using System;
using System.Collections.Generic;
using ParquetColumnTests.Column.Page.Mem;
using ParquetSharp;
using ParquetSharp.Column;
using ParquetSharp.Column.Impl;
using ParquetSharp.Column.Page;
using ParquetSharp.Example.Data;
using ParquetSharp.Example.Data.Simple;
using ParquetSharp.Example.Data.Simple.Convert;
using ParquetSharp.External;
using ParquetSharp.IO;
using ParquetSharp.IO.Api;
using ParquetSharp.Schema;
using Xunit;
using static ParquetSharp.Example.Paper;
using static ParquetSharp.Schema.PrimitiveType.PrimitiveTypeName;
using static ParquetSharp.Schema.Type.Repetition;
using Type = ParquetSharp.Schema.Type;
public class TestColumnIO
{
private static readonly Log LOG = Log.getLog(typeof(TestColumnIO));
private const string oneOfEach =
"message Document {\n"
+ " required int64 a;\n"
+ " required int32 b;\n"
+ " required float c;\n"
+ " required double d;\n"
+ " required boolean e;\n"
+ " required binary f;\n"
+ " required int96 g;\n"
+ " required fixed_len_byte_array(3) h;\n"
+ "}\n";
private const string schemaString =
"message Document {\n"
+ " required int64 DocId;\n"
+ " optional group Links {\n"
+ " repeated int64 Backward;\n"
+ " repeated int64 Forward;\n"
+ " }\n"
+ " repeated group Name {\n"
+ " repeated group Language {\n"
+ " required binary Code;\n"
+ " optional binary Country;\n"
+ " }\n"
+ " optional binary Url;\n"
+ " }\n"
+ "}\n";
int[][] expectedFSA = new int[][]
{
new int[] { 1 }, // 0: DocId
new int[] { 2, 1 }, // 1: Links.Backward
new int[] { 3, 2 }, // 2: Links.Forward
new int[] { 4, 4, 4 },// 3: Name.Language.Code
new int[] { 5, 5, 3 },// 4: Name.Language.Country
new int[] { 6, 3 } // 5: Name.Url
};
int[][] expectedFSA2 = new int[][]
{
new int[] { 1 }, // 0: DocId
new int[] { 2, 1, 1 },// 1: Name.Language.Country
};
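// In the two FSA tables above, entry [i][r] is the index of the column reader to switch to when leaving
// column i at repetition level r; a value equal to the number of leaves marks the end of the record (see validateFSA)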
public static readonly string[] expectedEventsForR1 = {
"startMessage()",
"DocId.addLong(10)",
"Links.start()",
"Links.Forward.addLong(20)",
"Links.Forward.addLong(40)",
"Links.Forward.addLong(60)",
"Links.end()",
"Name.start()",
"Name.Language.start()",
"Name.Language.Code.addBinary(en-us)",
"Name.Language.Country.addBinary(us)",
"Name.Language.end()",
"Name.Language.start()",
"Name.Language.Code.addBinary(en)",
"Name.Language.end()",
"Name.Url.addBinary(http://A)",
"Name.end()",
"Name.start()",
"Name.Url.addBinary(http://B)",
"Name.end()",
"Name.start()",
"Name.Language.start()",
"Name.Language.Code.addBinary(en-gb)",
"Name.Language.Country.addBinary(gb)",
"Name.Language.end()",
"Name.end()",
"endMessage()"
};
private bool useDictionary;
[Theory, InlineData(true), InlineData(false)]
public void testSchema(bool useDictionary)
{
this.useDictionary = useDictionary;
Assert.Equal(schemaString, schema.ToString());
}
[Theory, InlineData(true), InlineData(false)]
public void testReadUsingRequestedSchemaWithExtraFields(bool useDictionary)
{
this.useDictionary = useDictionary;
MessageType orginalSchema = new MessageType("schema",
new PrimitiveType(REQUIRED, INT32, "a"),
new PrimitiveType(OPTIONAL, INT32, "b")
);
MessageType schemaWithExtraField = new MessageType("schema",
new PrimitiveType(OPTIONAL, INT32, "b"),
new PrimitiveType(OPTIONAL, INT32, "a"),
new PrimitiveType(OPTIONAL, INT32, "c")
);
MemPageStore memPageStoreForOriginalSchema = new MemPageStore(1);
MemPageStore memPageStoreForSchemaWithExtraField = new MemPageStore(1);
SimpleGroupFactory groupFactory = new SimpleGroupFactory(orginalSchema);
writeGroups(orginalSchema, memPageStoreForOriginalSchema, groupFactory.newGroup().append("a", 1).append("b", 2));
SimpleGroupFactory groupFactory2 = new SimpleGroupFactory(schemaWithExtraField);
writeGroups(schemaWithExtraField, memPageStoreForSchemaWithExtraField, groupFactory2.newGroup().append("a", 1).append("b", 2).append("c", 3));
{
List<Group> groups = new List<Group>();
groups.AddRange(readGroups(memPageStoreForOriginalSchema, orginalSchema, schemaWithExtraField, 1));
groups.AddRange(readGroups(memPageStoreForSchemaWithExtraField, schemaWithExtraField, schemaWithExtraField, 1));
// TODO: add once we have the support for empty projection
// groups1.AddRange(readGroups(memPageStore3, schema3, schema2, 1));
object[][] expected = {
new object[] { 2, 1, null},
new object[] { 2, 1, 3},
// new object[] { null, null}
};
validateGroups(groups, expected);
}
}
[Theory, InlineData(true), InlineData(false)]
public void testReadUsingRequestedSchemaWithIncompatibleField(bool useDictionary)
{
this.useDictionary = useDictionary;
MessageType originalSchema = new MessageType("schema",
new PrimitiveType(OPTIONAL, INT32, "e"));
MemPageStore store = new MemPageStore(1);
SimpleGroupFactory groupFactory = new SimpleGroupFactory(originalSchema);
writeGroups(originalSchema, store, groupFactory.newGroup().append("e", 4));
try
{
MessageType schemaWithIncompatibleField = new MessageType("schema",
new PrimitiveType(OPTIONAL, BINARY, "e")); // Incompatible schema: different type
readGroups(store, originalSchema, schemaWithIncompatibleField, 1);
Assert.True(false, "should have thrown an incompatible schema exception");
}
catch (ParquetDecodingException e)
{
Assert.Equal("The requested schema is not compatible with the file schema. incompatible types: optional binary e != optional int32 e", e.Message);
}
}
[Theory, InlineData(true), InlineData(false)]
public void testReadUsingSchemaWithRequiredFieldThatWasOptional(bool useDictionary)
{
this.useDictionary = useDictionary;
MessageType originalSchema = new MessageType("schema",
new PrimitiveType(OPTIONAL, INT32, "e"));
MemPageStore store = new MemPageStore(1);
SimpleGroupFactory groupFactory = new SimpleGroupFactory(originalSchema);
writeGroups(originalSchema, store, groupFactory.newGroup().append("e", 4));
try
{
MessageType schemaWithRequiredFieldThatWasOptional = new MessageType("schema",
new PrimitiveType(REQUIRED, INT32, "e")); // Incompatible schema: required when it was optional
readGroups(store, originalSchema, schemaWithRequiredFieldThatWasOptional, 1);
Assert.True(false, "should have thrown an incompatible schema exception");
}
catch (ParquetDecodingException e)
{
Assert.Equal("The requested schema is not compatible with the file schema. incompatible types: required int32 e != optional int32 e", e.Message);
}
}
[Theory, InlineData(true), InlineData(false)]
public void testReadUsingProjectedSchema(bool useDictionary)
{
this.useDictionary = useDictionary;
MessageType orginalSchema = new MessageType("schema",
new PrimitiveType(REQUIRED, INT32, "a"),
new PrimitiveType(REQUIRED, INT32, "b")
);
MessageType projectedSchema = new MessageType("schema",
new PrimitiveType(OPTIONAL, INT32, "b")
);
MemPageStore store = new MemPageStore(1);
SimpleGroupFactory groupFactory = new SimpleGroupFactory(orginalSchema);
writeGroups(orginalSchema, store, groupFactory.newGroup().append("a", 1).append("b", 2));
{
List<Group> groups = new List<Group>();
groups.AddRange(readGroups(store, orginalSchema, projectedSchema, 1));
object[][] expected = {
new object[] {2},
};
validateGroups(groups, expected);
}
}
private void validateGroups(List<Group> groups1, object[][] e1)
{
IEnumerator<Group> i1 = groups1.GetEnumerator();
for (int i = 0; i < e1.Length; i++)
{
object[] objects = e1[i];
Assert.True(i1.MoveNext());
Group next = i1.Current;
for (int j = 0; j < objects.Length; j++)
{
object @object = objects[j];
if (@object == null)
{
Assert.Equal(0, next.getFieldRepetitionCount(j));
}
else {
Assert.Equal(1, next.getFieldRepetitionCount(j));
Assert.Equal(@object, next.getInteger(j, 0));
}
}
}
}
private List<Group> readGroups(MemPageStore memPageStore, MessageType fileSchema, MessageType requestedSchema, int n)
{
ColumnIOFactory columnIOFactory = new ColumnIOFactory(true);
MessageColumnIO columnIO = columnIOFactory.getColumnIO(requestedSchema, fileSchema);
RecordReaderImplementation<Group> recordReader = getRecordReader(columnIO, requestedSchema, memPageStore);
List<Group> groups = new List<Group>();
for (int i = 0; i < n; i++)
{
groups.Add(recordReader.read());
}
return groups;
}
private void writeGroups(MessageType writtenSchema, MemPageStore memPageStore, params Group[] groups)
{
ColumnIOFactory columnIOFactory = new ColumnIOFactory(true);
ColumnWriteStoreV1 columns = newColumnWriteStore(memPageStore);
MessageColumnIO columnIO = columnIOFactory.getColumnIO(writtenSchema);
RecordConsumer recordWriter = columnIO.getRecordWriter(columns);
GroupWriter groupWriter = new GroupWriter(recordWriter, writtenSchema);
foreach (Group group in groups)
{
groupWriter.write(group);
}
recordWriter.flush();
columns.flush();
}
[Theory, InlineData(true), InlineData(false)]
public void testColumnIO(bool useDictionary)
{
this.useDictionary = useDictionary;
log(schema);
log("r1");
log(r1);
log("r2");
log(r2);
MemPageStore memPageStore = new MemPageStore(2);
ColumnWriteStoreV1 columns = newColumnWriteStore(memPageStore);
ColumnIOFactory columnIOFactory = new ColumnIOFactory(true);
{
MessageColumnIO columnIO = columnIOFactory.getColumnIO(schema);
log(columnIO);
RecordConsumer recordWriter = columnIO.getRecordWriter(columns);
GroupWriter groupWriter = new GroupWriter(recordWriter, schema);
groupWriter.write(r1);
groupWriter.write(r2);
recordWriter.flush();
columns.flush();
log(columns);
log("=========");
RecordReaderImplementation<Group> recordReader = getRecordReader(columnIO, schema, memPageStore);
validateFSA(expectedFSA, columnIO, recordReader);
List<Group> records = new List<Group>();
records.Add(recordReader.read());
records.Add(recordReader.read());
int i = 0;
foreach (Group record in records)
{
log("r" + (++i));
log(record);
}
Assert.Equal(r1.ToString(), records[0].ToString());
Assert.Equal(r2.ToString(), records[1].ToString());
}
{
MessageColumnIO columnIO2 = columnIOFactory.getColumnIO(schema2);
List<Group> records = new List<Group>();
RecordReaderImplementation<Group> recordReader = getRecordReader(columnIO2, schema2, memPageStore);
validateFSA(expectedFSA2, columnIO2, recordReader);
records.Add(recordReader.read());
records.Add(recordReader.read());
int i = 0;
foreach (Group record in records)
{
log("r" + (++i));
log(record);
}
Assert.Equal(pr1.ToString(), records[0].ToString());
Assert.Equal(pr2.ToString(), records[1].ToString());
}
}
[Theory, InlineData(true), InlineData(false)]
public void testOneOfEach(bool useDictionary)
{
this.useDictionary = useDictionary;
MessageType oneOfEachSchema = MessageTypeParser.parseMessageType(oneOfEach);
GroupFactory gf = new SimpleGroupFactory(oneOfEachSchema);
Group g1 = gf.newGroup()
.append("a", 1L)
.append("b", 2)
.append("c", 3.0f)
.append("d", 4.0d)
.append("e", true)
.append("f", Binary.fromString("6"))
.append("g", new NanoTime(1234, Epoch.currentTimeMillis() * 1000))
.append("h", Binary.fromString("abc"));
testSchema(oneOfEachSchema, new List<Group> { g1 });
}
[Theory, InlineData(true), InlineData(false)]
public void testRequiredOfRequired(bool useDictionary)
{
this.useDictionary = useDictionary;
MessageType reqreqSchema = MessageTypeParser.parseMessageType(
"message Document {\n"
+ " required group foo {\n"
+ " required int64 bar;\n"
+ " }\n"
+ "}\n");
GroupFactory gf = new SimpleGroupFactory(reqreqSchema);
Group g1 = gf.newGroup();
g1.addGroup("foo").append("bar", 2L);
testSchema(reqreqSchema, new List<Group> { g1 });
}
[Theory, InlineData(true), InlineData(false)]
public void testOptionalRequiredInteraction(bool useDictionary)
{
this.useDictionary = useDictionary;
for (int i = 0; i < 6; i++)
{
Type current = new PrimitiveType(REQUIRED, BINARY, "primitive");
for (int j = 0; j < i; j++)
{
current = new GroupType(REQUIRED, "req" + j, current);
}
MessageType groupSchema = new MessageType("schema" + i, current);
GroupFactory gf = new SimpleGroupFactory(groupSchema);
List<Group> groups = new List<Group>();
Group root = gf.newGroup();
Group currentGroup = root;
for (int j = 0; j < i; j++)
{
currentGroup = currentGroup.addGroup(0);
}
currentGroup.add(0, Binary.fromString("foo"));
groups.Add(root);
testSchema(groupSchema, groups);
}
for (int i = 0; i < 6; i++)
{
Type current = new PrimitiveType(OPTIONAL, BINARY, "primitive");
for (int j = 0; j < i; j++)
{
current = new GroupType(REQUIRED, "req" + j, current);
}
MessageType groupSchema = new MessageType("schema" + (i + 6), current);
GroupFactory gf = new SimpleGroupFactory(groupSchema);
List<Group> groups = new List<Group>();
Group rootDefined = gf.newGroup();
Group rootUndefined = gf.newGroup();
Group currentDefinedGroup = rootDefined;
Group currentUndefinedGroup = rootUndefined;
for (int j = 0; j < i; j++)
{
currentDefinedGroup = currentDefinedGroup.addGroup(0);
currentUndefinedGroup = currentUndefinedGroup.addGroup(0);
}
currentDefinedGroup.add(0, Binary.fromString("foo"));
groups.Add(rootDefined);
groups.Add(rootUndefined);
testSchema(groupSchema, groups);
}
for (int i = 0; i < 6; i++)
{
Type current = new PrimitiveType(OPTIONAL, BINARY, "primitive");
for (int j = 0; j < 6; j++)
{
current = new GroupType(i == j ? OPTIONAL : REQUIRED, "req" + j, current);
}
MessageType groupSchema = new MessageType("schema" + (i + 12), current);
GroupFactory gf = new SimpleGroupFactory(groupSchema);
List<Group> groups = new List<Group>();
Group rootDefined = gf.newGroup();
Group rootUndefined = gf.newGroup();
Group currentDefinedGroup = rootDefined;
Group currentUndefinedGroup = rootUndefined;
for (int j = 0; j < 6; j++)
{
currentDefinedGroup = currentDefinedGroup.addGroup(0);
if (i < j)
{
currentUndefinedGroup = currentUndefinedGroup.addGroup(0);
}
}
currentDefinedGroup.add(0, Binary.fromString("foo"));
groups.Add(rootDefined);
groups.Add(rootUndefined);
testSchema(groupSchema, groups);
}
}
private void testSchema(MessageType messageSchema, List<Group> groups)
{
MemPageStore memPageStore = new MemPageStore(groups.Count);
ColumnWriteStoreV1 columns = newColumnWriteStore(memPageStore);
ColumnIOFactory columnIOFactory = new ColumnIOFactory(true);
MessageColumnIO columnIO = columnIOFactory.getColumnIO(messageSchema);
log(columnIO);
// Write groups.
RecordConsumer recordWriter = columnIO.getRecordWriter(columns);
GroupWriter groupWriter =
new GroupWriter(recordWriter, messageSchema);
foreach (Group group in groups)
{
groupWriter.write(group);
}
recordWriter.flush();
columns.flush();
// Read groups and verify.
RecordReaderImplementation<Group> recordReader =
getRecordReader(columnIO, messageSchema, memPageStore);
foreach (Group group in groups)
{
Group got = recordReader.read();
Assert.Equal(group.ToString(), got.ToString());
}
}
private RecordReaderImplementation<Group> getRecordReader(MessageColumnIO columnIO, MessageType schema, PageReadStore pageReadStore)
{
RecordMaterializer<Group> recordConverter = new GroupRecordConverter(schema);
return (RecordReaderImplementation<Group>)columnIO.getRecordReader(pageReadStore, recordConverter);
}
private void log(object o)
{
LOG.info(o);
}
private void validateFSA<T>(int[][] expectedFSA, MessageColumnIO columnIO, RecordReaderImplementation<T> recordReader)
{
log("FSA: ----");
List<PrimitiveColumnIO> leaves = columnIO.getLeaves();
for (int i = 0; i < leaves.Count; ++i)
{
PrimitiveColumnIO primitiveColumnIO = leaves[i];
log(Arrays.toString(primitiveColumnIO.getFieldPath()));
for (int r = 0; r < expectedFSA[i].Length; r++)
{
int next = expectedFSA[i][r];
log(" " + r + " -> " + (next == leaves.Count ? "end" : Arrays.toString(leaves[next].getFieldPath())) + ": " + recordReader.getNextLevel(i, r));
Assert.Equal(next, recordReader.getNextReader(i, r));
}
}
log("----");
}
[Theory, InlineData(true), InlineData(false)]
public void testPushParser(bool useDictionary)
{
this.useDictionary = useDictionary;
MemPageStore memPageStore = new MemPageStore(1);
ColumnWriteStoreV1 columns = newColumnWriteStore(memPageStore);
MessageColumnIO columnIO = new ColumnIOFactory().getColumnIO(schema);
RecordConsumer recordWriter = columnIO.getRecordWriter(columns);
new GroupWriter(recordWriter, schema).write(r1);
recordWriter.flush();
columns.flush();
RecordReader<bool> recordReader = columnIO.getRecordReader(memPageStore, new ExpectationValidatingConverter(expectedEventsForR1, schema));
recordReader.read();
}
private ColumnWriteStoreV1 newColumnWriteStore(MemPageStore memPageStore)
{
return new ColumnWriteStoreV1(memPageStore,
ParquetProperties.builder()
.withPageSize(800)
.withDictionaryPageSize(800)
.withDictionaryEncoding(useDictionary)
.build());
}
[Theory, InlineData(true), InlineData(false)]
public void testEmptyField(bool useDictionary)
{
this.useDictionary = useDictionary;
MemPageStore memPageStore = new MemPageStore(1);
ColumnWriteStoreV1 columns = newColumnWriteStore(memPageStore);
MessageColumnIO columnIO = new ColumnIOFactory(true).getColumnIO(schema);
RecordConsumer recordWriter = columnIO.getRecordWriter(columns);
recordWriter.startMessage();
recordWriter.startField("DocId", 0);
recordWriter.addLong(0);
recordWriter.endField("DocId", 0);
recordWriter.startField("Links", 1);
try
{
recordWriter.endField("Links", 1);
Assert.True(false, "expected exception because of empty field");
}
catch (ParquetEncodingException e)
{
Assert.Equal("empty fields are illegal, the field should be ommited completely instead", e.Message);
}
}
[Theory, InlineData(true), InlineData(false)]
public void testGroupWriter(bool useDictionary)
{
this.useDictionary = useDictionary;
List<Group> result = new List<Group>();
GroupRecordConverter groupRecordConverter = new GroupRecordConverter(schema);
RecordConsumer groupConsumer = new ConverterConsumer(groupRecordConverter.getRootConverter(), schema);
GroupWriter groupWriter = new GroupWriter(new RecordConsumerLoggingWrapper(groupConsumer), schema);
groupWriter.write(r1);
result.Add(groupRecordConverter.getCurrentRecord());
groupWriter.write(r2);
result.Add(groupRecordConverter.getCurrentRecord());
Assert.Equal(result[0].ToString(), r1.ToString());
Assert.Equal(result[1].ToString(), r2.ToString());
}
[Theory, InlineData(true), InlineData(false)]
public void testWriteWithGroupWriter(bool useDictionary)
{
this.useDictionary = useDictionary;
string[] expected = new[]
{
"[DocId]: 10, r:0, d:0",
"[Links, Forward]: 20, r:0, d:2",
"[Links, Forward]: 40, r:1, d:2",
"[Links, Forward]: 60, r:1, d:2",
"[Links, Backward]: null, r:0, d:1",
"[Name, Language, Code]: en-us, r:0, d:2",
"[Name, Language, Country]: us, r:0, d:3",
"[Name, Language, Code]: en, r:2, d:2",
"[Name, Language, Country]: null, r:2, d:2",
"[Name, Url]: http://A, r:0, d:2",
"[Name, Url]: http://B, r:1, d:2",
"[Name, Language, Code]: null, r:1, d:1",
"[Name, Language, Country]: null, r:1, d:1",
"[Name, Language, Code]: en-gb, r:1, d:2",
"[Name, Language, Country]: gb, r:1, d:3",
"[Name, Url]: null, r:1, d:1",
"[DocId]: 20, r:0, d:0",
"[Links, Backward]: 10, r:0, d:2",
"[Links, Backward]: 30, r:1, d:2",
"[Links, Forward]: 80, r:0, d:2",
"[Name, Url]: http://C, r:0, d:2",
"[Name, Language, Code]: null, r:0, d:1",
"[Name, Language, Country]: null, r:0, d:1"
};
ValidatingColumnWriteStore columns = new ValidatingColumnWriteStore(expected);
MessageColumnIO columnIO = new ColumnIOFactory().getColumnIO(schema);
RecordConsumer recordWriter = columnIO.getRecordWriter(columns);
GroupWriter groupWriter = new GroupWriter(recordWriter, schema);
groupWriter.write(r1);
groupWriter.write(r2);
recordWriter.flush();
columns.validate();
columns.flush();
columns.close();
}
}
sealed class ValidatingColumnWriteStore : ColumnWriteStore
{
private readonly string[] expected;
int counter = 0;
public ValidatingColumnWriteStore(string[] expected)
{
this.expected = expected;
}
public void close()
{
}
public ColumnWriter getColumnWriter(ColumnDescriptor path)
{
return new TestColumnWriter(this, path);
}
public void validate()
{
Assert.Equal(expected.Length, counter);
}
public void endRecord()
{
}
public void flush()
{
}
public long getAllocatedSize()
{
return 0;
}
public long getBufferedSize()
{
return 0;
}
public string memUsageString()
{
return null;
}
class TestColumnWriter : ColumnWriter
{
readonly ValidatingColumnWriteStore store;
readonly ColumnDescriptor path;
public TestColumnWriter(ValidatingColumnWriteStore store, ColumnDescriptor path)
{
this.store = store;
this.path = path;
}
private void validate(object value, int repetitionLevel, int definitionLevel)
{
string valueString = (value == null) ? "null" : value.ToString();
string actual = Arrays.toString(path.getPath()) + ": " + valueString + ", r:" + repetitionLevel + ", d:" + definitionLevel;
Assert.Equal(store.expected[store.counter], actual);
++store.counter;
}
public void writeNull(int repetitionLevel, int definitionLevel)
{
validate(null, repetitionLevel, definitionLevel);
}
public void write(Binary value, int repetitionLevel, int definitionLevel)
{
validate(value.toStringUsingUTF8(), repetitionLevel, definitionLevel);
}
public void write(float value, int repetitionLevel, int definitionLevel)
{
validate(value, repetitionLevel, definitionLevel);
}
public void write(bool value, int repetitionLevel, int definitionLevel)
{
validate(value, repetitionLevel, definitionLevel);
}
public void write(int value, int repetitionLevel, int definitionLevel)
{
validate(value, repetitionLevel, definitionLevel);
}
public void write(long value, int repetitionLevel, int definitionLevel)
{
validate(value, repetitionLevel, definitionLevel);
}
public void close()
{
}
public long getBufferedSizeInMemory()
{
throw new NotSupportedException();
}
public void write(double value, int repetitionLevel, int definitionLevel)
{
validate(value, repetitionLevel, definitionLevel);
}
}
}
}
| |
//---------------------------------------------------------------------------
//
// Copyright (C) Microsoft Corporation. All rights reserved.
//
//---------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Security;
using System.Windows;
using System.Windows.Interop;
using System.Windows.Input.Manipulations;
using System.Windows.Media;
using System.Windows.Threading;
using MS.Win32;
using MS.Internal;
using MS.Internal.PresentationCore;
namespace System.Windows.Input
{
/// <summary>
/// Handles detection of manipulations.
/// </summary>
internal sealed class ManipulationLogic
{
/// <summary>
/// Instantiates an instance of this class.
/// </summary>
internal ManipulationLogic(ManipulationDevice manipulationDevice)
{
_manipulationDevice = manipulationDevice;
}
/// <summary>
/// Hooked up to the manipulation processor and inertia processor's started event.
/// </summary>
/// <SecurityNote>
/// SecurityCritical: Calls PushEvent.
/// TreatAsSafe: Pushes a ManipulationStarted event, which does not need to be protected.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
private void OnManipulationStarted(object sender, Manipulation2DStartedEventArgs e)
{
PushEvent(new ManipulationStartedEventArgs(
_manipulationDevice,
LastTimestamp,
_currentContainer,
new Point(e.OriginX, e.OriginY)));
}
/// <summary>
/// Hooked up to the manipulation processor and inertia processor's delta event.
/// </summary>
/// <SecurityNote>
/// SecurityCritical: Calls PushEvent.
/// TreatAsSafe: Pushes a ManipulationDelta event, which does not need to be protected.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
private void OnManipulationDelta(object sender, Manipulation2DDeltaEventArgs e)
{
var deltaArguments = new ManipulationDeltaEventArgs(
_manipulationDevice,
LastTimestamp,
_currentContainer,
new Point(e.OriginX, e.OriginY),
ConvertDelta(e.Delta, null),
ConvertDelta(e.Cumulative, _lastManipulationBeforeInertia),
ConvertVelocities(e.Velocities),
IsInertiaActive);
PushEvent(deltaArguments);
}
/// <summary>
/// Hooked up to the manipulation processor's completed event.
/// </summary>
/// <SecurityNote>
/// SecurityCritical: Calls PushEvent.
/// TreatAsSafe: Pushes a ManipulationInertiaStartingEventArgs event, which doesn't need to be protected.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
private void OnManipulationCompleted(object sender, Manipulation2DCompletedEventArgs e)
{
// Manipulation portion completed.
if (_manualComplete && !_manualCompleteWithInertia)
{
// This is the last event in the sequence.
ManipulationCompletedEventArgs completedArguments = ConvertCompletedArguments(e);
RaiseManipulationCompleted(completedArguments);
}
else
{
// This event will configure inertia, which will start after this event.
_lastManipulationBeforeInertia = ConvertDelta(e.Total, null);
ManipulationInertiaStartingEventArgs inertiaArguments = new ManipulationInertiaStartingEventArgs(
_manipulationDevice,
LastTimestamp,
_currentContainer,
new Point(e.OriginX, e.OriginY),
ConvertVelocities(e.Velocities),
false);
PushEvent(inertiaArguments);
}
_manipulationProcessor = null;
}
/// <summary>
/// Hooked up to the inertia processor's completed event.
/// </summary>
/// <SecurityNote>
/// SecurityCritical: Calls PushEvent.
/// TreatAsSafe: Pushes a ManipulationInertiaStarting event, which doesn't need to be protected.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
private void OnInertiaCompleted(object sender, Manipulation2DCompletedEventArgs e)
{
// Inertia portion completed.
ClearTimer();
if (_manualComplete && _manualCompleteWithInertia)
{
// Another inertia portion was requested
_lastManipulationBeforeInertia = ConvertDelta(e.Total, _lastManipulationBeforeInertia);
ManipulationInertiaStartingEventArgs inertiaArguments = new ManipulationInertiaStartingEventArgs(
_manipulationDevice,
LastTimestamp,
_currentContainer,
new Point(e.OriginX, e.OriginY),
ConvertVelocities(e.Velocities),
true);
PushEvent(inertiaArguments);
}
else
{
// This is the last event in the sequence.
ManipulationCompletedEventArgs completedArguments = ConvertCompletedArguments(e);
RaiseManipulationCompleted(completedArguments);
}
_inertiaProcessor = null;
}
/// <SecurityNote>
/// SecurityCritical: Calls PushEvent.
/// TreatAsSafe: Pushes a ManipulationCompleted event, which doesn't need to be protected.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
private void RaiseManipulationCompleted(ManipulationCompletedEventArgs e)
{
PushEvent(e);
}
/// <summary>
/// Called after a Completed event has been processed.
/// </summary>
internal void OnCompleted()
{
_lastManipulationBeforeInertia = null;
SetContainer(null);
}
/// <summary>
/// Converts a Manipulation2DCompletedEventArgs object into a ManipulationCompletedEventArgs object.
/// </summary>
private ManipulationCompletedEventArgs ConvertCompletedArguments(Manipulation2DCompletedEventArgs e)
{
return new ManipulationCompletedEventArgs(
_manipulationDevice,
LastTimestamp,
_currentContainer,
new Point(e.OriginX, e.OriginY),
ConvertDelta(e.Total, _lastManipulationBeforeInertia),
ConvertVelocities(e.Velocities),
IsInertiaActive);
}
private static ManipulationDelta ConvertDelta(ManipulationDelta2D delta, ManipulationDelta add)
{
if (add != null)
{
return new ManipulationDelta(
new Vector(delta.TranslationX + add.Translation.X, delta.TranslationY + add.Translation.Y),
AngleUtil.RadiansToDegrees(delta.Rotation) + add.Rotation,
new Vector(delta.ScaleX * add.Scale.X, delta.ScaleY * add.Scale.Y),
new Vector(delta.ExpansionX + add.Expansion.X, delta.ExpansionY + add.Expansion.Y));
}
else
{
return new ManipulationDelta(
new Vector(delta.TranslationX, delta.TranslationY),
AngleUtil.RadiansToDegrees(delta.Rotation),
new Vector(delta.ScaleX, delta.ScaleY),
new Vector(delta.ExpansionX, delta.ExpansionY));
}
}
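// A minimal illustrative sketch (hypothetical values; names reused from the fields above):
// combining a frame's raw 2D delta with a previously accumulated ManipulationDelta.
// Translation and expansion add, scale multiplies, and rotation is converted from
// radians to degrees before being added, e.g.:
//
//   ManipulationDelta total = ConvertDelta(e.Cumulative, _lastManipulationBeforeInertia);
//   // total.Rotation == RadiansToDegrees(e.Cumulative.Rotation) + _lastManipulationBeforeInertia.Rotation
//   // total.Scale.X  == e.Cumulative.ScaleX * _lastManipulationBeforeInertia.Scale.X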
private static ManipulationVelocities ConvertVelocities(ManipulationVelocities2D velocities)
{
return new ManipulationVelocities(
new Vector(velocities.LinearVelocityX, velocities.LinearVelocityY),
AngleUtil.RadiansToDegrees(velocities.AngularVelocity),
new Vector(velocities.ExpansionVelocityX, velocities.ExpansionVelocityY));
}
/// <summary>
/// Completes any pending manipulation or inertia processing.
/// </summary>
/// <param name="withInertia">
/// If a manipulation is active, specifies whether to continue
/// to an inertia phase (true) or simply end the sequence (false).
/// </param>
internal void Complete(bool withInertia)
{
try
{
_manualComplete = true;
_manualCompleteWithInertia = withInertia;
if (IsManipulationActive)
{
_manipulationProcessor.CompleteManipulation(GetCurrentTimestamp());
}
else if (IsInertiaActive)
{
_inertiaProcessor.Complete(GetCurrentTimestamp());
}
}
finally
{
_manualComplete = false;
_manualCompleteWithInertia = false;
}
}
/// <summary>
/// Gets ManipulationCompletedEventArgs object out of ManipulationInertiaStartingEventArgs
/// </summary>
private ManipulationCompletedEventArgs GetManipulationCompletedArguments(ManipulationInertiaStartingEventArgs e)
{
Debug.Assert(_lastManipulationBeforeInertia != null);
return new ManipulationCompletedEventArgs(
_manipulationDevice,
LastTimestamp,
_currentContainer,
new Point(e.ManipulationOrigin.X, e.ManipulationOrigin.Y),
_lastManipulationBeforeInertia,
e.InitialVelocities,
IsInertiaActive);
}
/// <summary>
/// Starts the inertia phase based on the results of a ManipulationInertiaStarting event.
/// </summary>
internal void BeginInertia(ManipulationInertiaStartingEventArgs e)
{
if (e.CanBeginInertia())
{
_inertiaProcessor = new InertiaProcessor2D();
_inertiaProcessor.Delta += OnManipulationDelta;
_inertiaProcessor.Completed += OnInertiaCompleted;
e.ApplyParameters(_inertiaProcessor);
// Setup a timer to tick the inertia to completion
_inertiaTimer = new DispatcherTimer();
_inertiaTimer.Interval = TimeSpan.FromMilliseconds(15);
_inertiaTimer.Tick += new EventHandler(OnInertiaTick);
_inertiaTimer.Start();
}
else
{
// This is the last event in the sequence.
ManipulationCompletedEventArgs completedArguments = GetManipulationCompletedArguments(e);
RaiseManipulationCompleted(completedArguments);
PushEventsToDevice();
}
}
internal static Int64 GetCurrentTimestamp()
{
// Does QueryPerformanceCounter to get the current time in 100ns units
return MediaContext.CurrentTicks;
}
private void OnInertiaTick(object sender, EventArgs e)
{
// Tick the inertia
if (IsInertiaActive)
{
if (!_inertiaProcessor.Process(GetCurrentTimestamp()))
{
ClearTimer();
}
PushEventsToDevice();
}
else
{
ClearTimer();
}
}
private void ClearTimer()
{
if (_inertiaTimer != null)
{
_inertiaTimer.Stop();
_inertiaTimer = null;
}
}
/// <summary>
/// Prepares and raises a manipulation event.
/// </summary>
/// <SecurityNote>
/// Critical: Adds an input event to a list that will eventually be added to the InputManager queue.
/// Accesses _generatedEvent.
/// </SecurityNote>
[SecurityCritical]
private void PushEvent(InputEventArgs e)
{
// We only expect to generate one event at a time and should never need a queue.
Debug.Assert(_generatedEvent == null, "There is already a generated event waiting to be pushed.");
_generatedEvent = e;
}
/// <summary>
/// Pushes generated events to the manipulation device.
/// </summary>
/// <SecurityNote>
/// SecurityCritical: ProcessManipulationInput. Accesses _generatedEvent.
/// TreatAsSafe: OK to send manipulation and inertia events.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
internal void PushEventsToDevice()
{
if (_generatedEvent != null)
{
InputEventArgs generatedEvent = _generatedEvent;
_generatedEvent = null;
_manipulationDevice.ProcessManipulationInput(generatedEvent);
}
}
/// <summary>
/// Raises ManipulationBoundaryFeedback to allow handlers to provide feedback that manipulation has hit an edge.
/// </summary>
/// <param name="unusedManipulation">The total unused manipulation.</param>
/// <SecurityNote>
/// SecurityCritical: Calls PushEvent.
/// TreatAsSafe: Pushes a ManipulationBoundaryFeedbackEventArgs event, which does not need to be protected.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
internal void RaiseBoundaryFeedback(ManipulationDelta unusedManipulation, bool requestedComplete)
{
bool hasUnusedManipulation = (unusedManipulation != null);
if ((!hasUnusedManipulation || requestedComplete) && HasPendingBoundaryFeedback)
{
// Create a "zero" message to end currently pending feedback
unusedManipulation = new ManipulationDelta(new Vector(), 0.0, new Vector(1.0, 1.0), new Vector());
HasPendingBoundaryFeedback = false;
}
else if (hasUnusedManipulation)
{
HasPendingBoundaryFeedback = true;
}
if (unusedManipulation != null)
{
PushEvent(new ManipulationBoundaryFeedbackEventArgs(_manipulationDevice, LastTimestamp, _currentContainer, unusedManipulation));
}
}
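// A minimal illustrative sketch (hypothetical values): when a drag runs 30px past the edge,
// the unused translation (30, 0) is reported as boundary feedback. Once the manipulation
// returns inside or completes, the branch above pushes a "zero" delta --
//   new ManipulationDelta(new Vector(), 0.0, new Vector(1.0, 1.0), new Vector())
// -- to end the currently pending feedback.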
private bool HasPendingBoundaryFeedback
{
get;
set;
}
private int LastTimestamp
{
get;
set;
}
internal void ReportFrame(ICollection<IManipulator> manipulators)
{
Int64 timestamp = GetCurrentTimestamp();
// InputEventArgs timestamps are Int32 while the processors take Int64
// GetMessageTime() is used for all other InputEventArgs, such as mouse and keyboard input.
// It does not match QueryPerformanceCounter(); experiments show GetMessageTime() is roughly 120ms ahead.
LastTimestamp = SafeNativeMethods.GetMessageTime();
int numManipulators = manipulators.Count;
if (IsInertiaActive && (numManipulators > 0))
{
// Inertia is active but now there are fingers, stop inertia
_inertiaProcessor.Complete(timestamp);
PushEventsToDevice();
}
if (!IsManipulationActive && (numManipulators > 0))
{
// Time to start a new manipulation
ManipulationStartingEventArgs startingArgs = RaiseStarting();
if (!startingArgs.RequestedCancel && (startingArgs.Mode != ManipulationModes.None))
{
// Determine if we allow single-finger manipulation
if (startingArgs.IsSingleTouchEnabled || (numManipulators >= 2))
{
SetContainer(startingArgs.ManipulationContainer);
_mode = startingArgs.Mode;
_pivot = startingArgs.Pivot;
IList<ManipulationParameters2D> parameters = startingArgs.Parameters;
_manipulationProcessor = new ManipulationProcessor2D(ConvertMode(_mode), ConvertPivot(_pivot));
if (parameters != null)
{
int count = parameters.Count;
for (int i = 0; i < count; i++)
{
_manipulationProcessor.SetParameters(parameters[i]);
}
}
_manipulationProcessor.Started += OnManipulationStarted;
_manipulationProcessor.Delta += OnManipulationDelta;
_manipulationProcessor.Completed += OnManipulationCompleted;
_currentManipulators.Clear();
}
}
}
if (IsManipulationActive)
{
// A manipulation process is available to process this frame of manipulators
UpdateManipulators(manipulators);
_manipulationProcessor.ProcessManipulators(timestamp, CurrentManipulators);
PushEventsToDevice();
}
}
/// <SecurityNote>
/// Critical - Calls ProcessManipulationInput.
/// TreatAsSafe - Creates the event being raised itself, an event that is not considered critical.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
private ManipulationStartingEventArgs RaiseStarting()
{
ManipulationStartingEventArgs starting = new ManipulationStartingEventArgs(_manipulationDevice, Environment.TickCount);
starting.ManipulationContainer = _manipulationDevice.Target;
_manipulationDevice.ProcessManipulationInput(starting);
return starting;
}
internal IInputElement ManipulationContainer
{
get { return _currentContainer; }
set
{
//
SetContainer(value);
}
}
internal ManipulationModes ManipulationMode
{
get { return _mode; }
set
{
_mode = value;
if (_manipulationProcessor != null)
{
_manipulationProcessor.SupportedManipulations = ConvertMode(_mode);
}
}
}
private static Manipulations2D ConvertMode(ManipulationModes mode)
{
Manipulations2D manipulations = Manipulations2D.None;
if ((mode & ManipulationModes.TranslateX) != 0)
{
manipulations |= Manipulations2D.TranslateX;
}
if ((mode & ManipulationModes.TranslateY) != 0)
{
manipulations |= Manipulations2D.TranslateY;
}
if ((mode & ManipulationModes.Scale) != 0)
{
manipulations |= Manipulations2D.Scale;
}
if ((mode & ManipulationModes.Rotate) != 0)
{
manipulations |= Manipulations2D.Rotate;
}
return manipulations;
}
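// A minimal illustrative sketch: because the mapping above is flag-by-flag, a combined
// WPF mode converts to the corresponding combined 2D flags, e.g.:
//
//   ConvertMode(ManipulationModes.TranslateX | ManipulationModes.Rotate)
//       == (Manipulations2D.TranslateX | Manipulations2D.Rotate)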
internal ManipulationPivot ManipulationPivot
{
get { return _pivot; }
set
{
_pivot = value;
if (_manipulationProcessor != null)
{
_manipulationProcessor.Pivot = ConvertPivot(value);
}
}
}
private static ManipulationPivot2D ConvertPivot(ManipulationPivot pivot)
{
if (pivot != null)
{
Point center = pivot.Center;
return new ManipulationPivot2D()
{
X = (float)center.X,
Y = (float)center.Y,
Radius = (float)Math.Max(1.0, pivot.Radius)
};
}
return null;
}
internal void SetManipulationParameters(ManipulationParameters2D parameter)
{
if (_manipulationProcessor != null)
{
_manipulationProcessor.SetParameters(parameter);
}
}
private void UpdateManipulators(ICollection<IManipulator> updatedManipulators)
{
// Clear out the old removed collection and use it to store
// the new current collection. The old current collection
// will be used to generate the new removed collection.
_removedManipulators.Clear();
var temp = _removedManipulators;
_removedManipulators = _currentManipulators;
_currentManipulators = temp;
// End the manipulation if the element is not
// visible anymore
UIElement uie = _currentContainer as UIElement;
if (uie != null)
{
if (!uie.IsVisible)
{
return;
}
}
else
{
UIElement3D uie3D = _currentContainer as UIElement3D;
if (uie3D != null &&
!uie3D.IsVisible)
{
return;
}
}
// For each updated manipulator, convert it to the correct format in the
// current collection and remove it from the removed collection. What is left
// in the removed collection will be the manipulators that were removed.
foreach (IManipulator updatedManipulator in updatedManipulators)
{
//
int id = updatedManipulator.Id;
_removedManipulators.Remove(id); // This manipulator was not removed
Point position = updatedManipulator.GetPosition(_currentContainer);
position = _manipulationDevice.GetTransformedManipulatorPosition(position);
_currentManipulators[id] = new Manipulator2D(id, (float)position.X, (float)position.Y);
}
}
/// <SecurityNote>
/// Critical - Calls PresentationSource.CriticalFromVisual.
/// TreatAsSafe - Does not expose PresentationSource itself.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
private void SetContainer(IInputElement newContainer)
{
// unsubscribe from LayoutUpdated
UnsubscribeFromLayoutUpdated();
// clear cached values
_containerPivotPoint = new Point();
_containerSize = new Size();
_root = null;
// remember the new container
_currentContainer = newContainer;
if (newContainer != null)
{
// get the new root
PresentationSource presentationSource = PresentationSource.CriticalFromVisual((Visual)newContainer);
if (presentationSource != null)
{
_root = presentationSource.RootVisual as UIElement;
}
// subscribe to LayoutUpdated
if (_containerLayoutUpdated != null)
{
SubscribeToLayoutUpdated();
}
}
}
internal event EventHandler<EventArgs> ContainerLayoutUpdated
{
add
{
bool wasNull = _containerLayoutUpdated == null;
_containerLayoutUpdated += value;
// if this is the first handler, try to subscribe to LayoutUpdated event
if (wasNull && _containerLayoutUpdated != null)
{
SubscribeToLayoutUpdated();
}
}
remove
{
bool wasNull = _containerLayoutUpdated == null;
_containerLayoutUpdated -= value;
// if this is the last handler, unsubscribe from LayoutUpdated event
if (!wasNull && _containerLayoutUpdated == null)
{
UnsubscribeFromLayoutUpdated();
}
}
}
private void SubscribeToLayoutUpdated()
{
UIElement container = _currentContainer as UIElement;
if (container != null)
{
container.LayoutUpdated += OnLayoutUpdated;
}
}
private void UnsubscribeFromLayoutUpdated()
{
UIElement container = _currentContainer as UIElement;
if (container != null)
{
container.LayoutUpdated -= OnLayoutUpdated;
}
}
/// <summary>
/// OnLayoutUpdated handler; raises the ContainerLayoutUpdated event if the container's position or size has changed
/// since the last layout update.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void OnLayoutUpdated(object sender, EventArgs e)
{
Debug.Assert(_containerLayoutUpdated != null);
//check position and size and update the cached values
if (UpdateCachedPositionAndSize())
{
_containerLayoutUpdated(this, EventArgs.Empty);
}
}
private bool UpdateCachedPositionAndSize()
{
// Determine if the manipulation needs to be updated because of a position or size change.
// * Size change is detected by comparing RenderSize.
// * Position change is detected by translating PivotPoint into the element's coordinate space. In general
//   this is not accurate because a rotation around PivotPoint won't be detected, but PivotPoint is chosen far
//   outside the Window bounds, so in practice that should be a very rare case.
//   A more accurate solution would require 2 or 3 points, which is more expensive.
if (_root == null)
{
return false;
}
UIElement container = _currentContainer as UIElement;
if (container == null)
{
return false;
}
Size renderSize = container.RenderSize;
Point translatedPivotPoint = _root.TranslatePoint(LayoutUpdateDetectionPivotPoint, container);
bool changed = (!DoubleUtil.AreClose(renderSize, _containerSize) ||
!DoubleUtil.AreClose(translatedPivotPoint, _containerPivotPoint));
if (changed)
{
// update cached values
_containerSize = renderSize;
_containerPivotPoint = translatedPivotPoint;
}
return changed;
}
private IEnumerable<Manipulator2D> CurrentManipulators
{
get { return (_currentManipulators.Count > 0) ? _currentManipulators.Values : null; }
}
internal bool IsManipulationActive
{
get { return _manipulationProcessor != null; }
}
private bool IsInertiaActive
{
get { return _inertiaProcessor != null; }
}
private ManipulationDevice _manipulationDevice;
private IInputElement _currentContainer;
private ManipulationPivot _pivot;
private ManipulationModes _mode;
private ManipulationProcessor2D _manipulationProcessor;
private InertiaProcessor2D _inertiaProcessor;
// A list of manipulators that are currently active (i.e. fingers touching the screen)
private Dictionary<int, Manipulator2D> _currentManipulators = new Dictionary<int, Manipulator2D>(2);
// A list of manipulators that have been removed (stored to avoid allocating each frame)
private Dictionary<int, Manipulator2D> _removedManipulators = new Dictionary<int, Manipulator2D>(2);
// When inertia starts, its values are relative to the end point specified in
// this event. WPF's API wants to expose inertia deltas relative to the first
// Started event. This Completed event provides enough information to convert
// the delta values so that they are relative to the Started event.
private ManipulationDelta _lastManipulationBeforeInertia;
/// <SecurityNote>
/// Critical: This event is sent to the input manager queue -- possible spoofing vector.
/// </SecurityNote>
[SecurityCritical]
private InputEventArgs _generatedEvent;
private DispatcherTimer _inertiaTimer;
private bool _manualComplete;
private bool _manualCompleteWithInertia;
private EventHandler<EventArgs> _containerLayoutUpdated;
// pivot point to detect position and size change, see UpdateCachedPositionAndSize for more details
// The odd magic number makes an accidental match even more unlikely.
private static readonly Point LayoutUpdateDetectionPivotPoint = new Point(-10234.1234, -10234.1234);
// cached values to detect position and size change
private Point _containerPivotPoint;
private Size _containerSize;
private UIElement _root;
}
}
| |
//----------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//----------------------------------------------------------------
namespace System.ServiceModel.Activities
{
using System;
using System.Activities;
using System.Activities.Expressions;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime;
using System.Runtime.DurableInstancing;
using System.ServiceModel;
using System.ServiceModel.Activities.Dispatcher;
using System.ServiceModel.Channels;
using System.Xaml;
using Microsoft.VisualBasic.Activities;
using SR2 = System.ServiceModel.Activities.SR;
static class MessagingActivityHelper
{
static Type faultExceptionType = typeof(FaultException);
static Type faultExceptionGenericType = typeof(FaultException<>);
public const string ActivityInstanceId = "ActivityInstanceId";
public const string ActivityName = "ActivityName";
public const string ActivityType = "ActivityType";
public const string ActivityTypeExecuteUserCode = "ExecuteUserCode";
public const string MessagingActivityTypeActivityExecution = "MessagingActivityExecution";
public const string E2EActivityId = "E2EActivityId";
public const string MessageId = "MessageId";
public const string ActivityNameWorkflowOperationInvoke = "WorkflowOperationInvoke";
public const string MessageCorrelationReceiveRecord = "MessageCorrelationReceiveRecord";
public const string MessageCorrelationSendRecord = "MessageCorrelationSendRecord";
public static void FixMessageArgument(Argument messageArgument, ArgumentDirection direction, ActivityMetadata metadata)
{
Type messageType = (messageArgument == null) ? TypeHelper.ObjectType : messageArgument.ArgumentType;
AddRuntimeArgument(messageArgument, "Message", messageType, direction, metadata);
}
public static void AddRuntimeArgument(Argument messageArgument, string runtimeArgumentName, Type runtimeArgumentType,
ArgumentDirection runtimeArgumentDirection, ActivityMetadata metadata)
{
RuntimeArgument argument = new RuntimeArgument(runtimeArgumentName, runtimeArgumentType, runtimeArgumentDirection);
metadata.Bind(messageArgument, argument);
metadata.AddArgument(argument);
}
//
public static IList<T> GetCallbacks<T>(ExecutionProperties executionProperties)
where T : class
{
List<T> list = null;
if (!executionProperties.IsEmpty)
{
T temp;
foreach (KeyValuePair<string, object> item in executionProperties)
{
temp = item.Value as T;
if (temp != null)
{
if (list == null)
{
list = new List<T>();
}
list.Add(temp);
}
}
}
return list;
}
public static Message InitializeCorrelationHandles(NativeActivityContext context,
CorrelationHandle selectHandle, CorrelationHandle ambientHandle, Collection<CorrelationInitializer> additionalCorrelations,
CorrelationKeyCalculator keyCalculator, Message message)
{
InstanceKey instanceKey;
ICollection<InstanceKey> additionalKeys;
//
MessageBuffer buffer = message.CreateBufferedCopy(int.MaxValue);
if (keyCalculator.CalculateKeys(buffer, message, out instanceKey, out additionalKeys))
{
InitializeCorrelationHandles(context, selectHandle, ambientHandle, additionalCorrelations, instanceKey, additionalKeys);
}
return buffer.CreateMessage();
}
public static void InitializeCorrelationHandles(NativeActivityContext context,
CorrelationHandle selectHandle, CorrelationHandle ambientHandle, Collection<CorrelationInitializer> additionalCorrelations,
MessageProperties messageProperties)
{
CorrelationMessageProperty correlationMessageProperty;
if (CorrelationMessageProperty.TryGet(messageProperties, out correlationMessageProperty))
{
InitializeCorrelationHandles(context, selectHandle, ambientHandle, additionalCorrelations,
correlationMessageProperty.CorrelationKey, correlationMessageProperty.AdditionalKeys);
}
}
// both receive and send initialize correlations using this method
// if selectHandle is not null, we first try to initialize instanceKey with it, else we try to initialize the ambient handle
// if the ambient handle is not used for initializing the instance key, we might use it for initializing a QueryCorrelationInitializer.
// SelectHandle usage:
// Receive: selectHandle is the correlatesWith handle
// SendReply: in case of context based correlation, this is the context handle
// Send: in case of context based correlation, this will be the callback handle
// ReceiveReply: selectHandle will be always null
// Note that only Receive can initialize a content based correlation with a selectHandle (parallel convoy)
internal static void InitializeCorrelationHandles(NativeActivityContext context,
CorrelationHandle selectHandle, CorrelationHandle ambientHandle, Collection<CorrelationInitializer> additionalCorrelations,
InstanceKey instanceKey, ICollection<InstanceKey> additionalKeys)
{
bool isAmbientHandleUsed = false;
if (instanceKey != null && instanceKey.IsValid)
{
if (selectHandle != null)
{
selectHandle.InitializeBookmarkScope(context, instanceKey);
}
else if (ambientHandle != null)
{
ambientHandle.InitializeBookmarkScope(context, instanceKey);
isAmbientHandleUsed = true;
}
else if (context.DefaultBookmarkScope.IsInitialized)
{
if (context.DefaultBookmarkScope.Id != instanceKey.Value)
{
throw FxTrace.Exception.AsError(
new InvalidOperationException(SR2.CorrelationHandleInUse(context.DefaultBookmarkScope.Id, instanceKey.Value)));
}
}
else
{
context.DefaultBookmarkScope.Initialize(context, instanceKey.Value);
}
}
if (additionalKeys != null && additionalCorrelations != null)
{
// The ordering of items in SelectAdditional and additional correlations is the same
// Therefore, we assign keys iteratively
IEnumerator<CorrelationInitializer> enumerator = additionalCorrelations.GetEnumerator();
foreach (InstanceKey key in additionalKeys)
{
Fx.Assert(key != null && key.IsValid, "only valid keys should be passed into InitializeCorrelationHandles");
while (enumerator.MoveNext())
{
QueryCorrelationInitializer queryCorrelation = enumerator.Current as QueryCorrelationInitializer;
if (queryCorrelation != null)
{
CorrelationHandle handle = (queryCorrelation.CorrelationHandle != null ? queryCorrelation.CorrelationHandle.Get(context) : null);
if (handle == null)
{
if (ambientHandle != null && !isAmbientHandleUsed)
{
handle = ambientHandle;
isAmbientHandleUsed = true;
}
else
{
throw FxTrace.Exception.AsError(
new InvalidOperationException(SR2.QueryCorrelationInitializerCannotBeInitialized));
}
}
handle.InitializeBookmarkScope(context, key);
break;
}
}
}
}
}
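// A minimal illustrative sketch (argument names are hypothetical): the instance key above is
// applied with a fixed precedence -- selectHandle first, then ambientHandle, then the context's
// DefaultBookmarkScope. A Receive with a correlatesWith handle would therefore flow roughly as:
//
//   InitializeCorrelationHandles(context,
//       correlatesWithHandle,      // selectHandle: wins when non-null
//       ambientHandle,             // used only when selectHandle is null
//       additionalCorrelations, instanceKey, additionalKeys);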
public static CorrelationCallbackContext CreateCorrelationCallbackContext(MessageProperties messageProperties)
{
CallbackContextMessageProperty callbackMessageContextProperty;
if (CallbackContextMessageProperty.TryGet(messageProperties, out callbackMessageContextProperty))
{
EndpointAddress listenAddress;
IDictionary<string, string> context;
callbackMessageContextProperty.GetListenAddressAndContext(out listenAddress, out context);
return new CorrelationCallbackContext
{
ListenAddress = EndpointAddress10.FromEndpointAddress(listenAddress),
Context = context
};
}
return null;
}
public static CorrelationContext CreateCorrelationContext(MessageProperties messageProperties)
{
ContextMessageProperty contextMessageProperty;
if (ContextMessageProperty.TryGet(messageProperties, out contextMessageProperty))
{
IDictionary<string, string> context;
context = contextMessageProperty.Context;
return new CorrelationContext
{
Context = context
};
}
return null;
}
public static bool CompareContextEquality(IDictionary<string, string> context1, IDictionary<string, string> context2)
{
if (context1 != context2)
{
if (context1 == null ||
context2 == null ||
context1.Count != context2.Count)
{
return false;
}
foreach (KeyValuePair<string, string> pair in context1)
{
if (!context2.Contains(pair))
{
return false;
}
}
}
return true;
}
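// A minimal illustrative sketch (hypothetical values): two context dictionaries compare equal
// only when every key/value pair matches, e.g.:
//
//   var a = new Dictionary<string, string> { { "instanceId", "42" } };
//   var b = new Dictionary<string, string> { { "instanceId", "42" } };
//   var c = new Dictionary<string, string> { { "instanceId", "43" } };
//   // CompareContextEquality(a, b) == true, CompareContextEquality(a, c) == false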
public static InArgument<CorrelationHandle> CreateReplyCorrelatesWith(InArgument<CorrelationHandle> requestCorrelatesWith)
{
Fx.Assert(requestCorrelatesWith != null, "Argument cannot be null!");
VariableValue<CorrelationHandle> variableValue = requestCorrelatesWith.Expression as VariableValue<CorrelationHandle>;
if (variableValue != null)
{
return new InArgument<CorrelationHandle>(variableValue.Variable);
}
VisualBasicValue<CorrelationHandle> vbvalue = requestCorrelatesWith.Expression as VisualBasicValue<CorrelationHandle>;
if (vbvalue != null)
{
return new InArgument<CorrelationHandle>(new VisualBasicValue<CorrelationHandle>(vbvalue.ExpressionText));
}
// We use XAML roundtrip to clone expression
string xamlStr = XamlServices.Save(requestCorrelatesWith.Expression);
object obj = XamlServices.Parse(xamlStr);
Activity<CorrelationHandle> expression = obj as Activity<CorrelationHandle>;
Fx.Assert(expression != null, "Failed to clone CorrelationHandle using XAML roundtrip!");
return new InArgument<CorrelationHandle>(expression);
}
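// A minimal illustrative sketch (the Literal<T> expression is a hypothetical example): expressions
// that are neither VariableValue nor VisualBasicValue are cloned by serializing to XAML and parsing
// back, roughly:
//
//   string xaml = XamlServices.Save(new Literal<CorrelationHandle>(null));
//   var clone = (Activity<CorrelationHandle>)XamlServices.Parse(xaml);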
public static void ValidateCorrelationInitializer(ActivityMetadata metadata, Collection<CorrelationInitializer> correlationInitializers, bool isReply, string displayName, string operationName)
{
Fx.Assert(metadata != null, "cannot be null");
if (correlationInitializers != null && correlationInitializers.Count > 0)
{
bool queryInitializerWithEmptyHandle = false;
foreach (CorrelationInitializer correlation in correlationInitializers)
{
if (correlation is RequestReplyCorrelationInitializer && isReply)
{
// This is a reply, so additional correlations should not have a request reply handle
metadata.AddValidationError(SR.ReplyShouldNotIncludeRequestReplyHandle(displayName, operationName));
}
QueryCorrelationInitializer queryCorrelation = correlation as QueryCorrelationInitializer;
if (queryCorrelation != null)
{
if (queryCorrelation.MessageQuerySet.Count == 0)
{
metadata.AddValidationError(SR.QueryCorrelationInitializerWithEmptyMessageQuerySet(displayName, operationName));
}
}
if (correlation.CorrelationHandle == null)
{
if (correlation is QueryCorrelationInitializer)
{
if (!queryInitializerWithEmptyHandle)
{
queryInitializerWithEmptyHandle = true;
}
else
{
// more than one queryInitializer present, in this case we don't permit null handle
metadata.AddValidationError(SR.NullCorrelationHandleInMultipleQueryCorrelation);
}
}
else
{
metadata.AddValidationError(SR.NullCorrelationHandleInInitializeCorrelation(correlation.GetType().Name));
}
}
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void InsertUInt64129()
{
var test = new SimpleUnaryOpTest__InsertUInt64129();
try
{
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local works
test.RunLclFldScenario();
// Validates passing an instance member works
test.RunFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
}
catch (PlatformNotSupportedException)
{
test.Succeeded = true;
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleUnaryOpTest__InsertUInt64129
{
private const int VectorSize = 16;
private const int Op1ElementCount = VectorSize / sizeof(UInt64);
private const int RetElementCount = VectorSize / sizeof(UInt64);
private static UInt64[] _data = new UInt64[Op1ElementCount];
private static Vector128<UInt64> _clsVar;
private Vector128<UInt64> _fld;
private SimpleUnaryOpTest__DataTable<UInt64, UInt64> _dataTable;
static SimpleUnaryOpTest__InsertUInt64129()
{
var random = new Random();
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (ulong)0; }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref _clsVar), ref Unsafe.As<UInt64, byte>(ref _data[0]), VectorSize);
}
public SimpleUnaryOpTest__InsertUInt64129()
{
Succeeded = true;
var random = new Random();
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (ulong)0; }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref _fld), ref Unsafe.As<UInt64, byte>(ref _data[0]), VectorSize);
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (ulong)0; }
_dataTable = new SimpleUnaryOpTest__DataTable<UInt64, UInt64>(_data, new UInt64[RetElementCount], VectorSize);
}
public bool IsSupported => Sse41.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
var result = Sse41.Insert(
Unsafe.Read<Vector128<UInt64>>(_dataTable.inArrayPtr),
(ulong)2,
129
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
var result = Sse41.Insert(
Sse2.LoadVector128((UInt64*)(_dataTable.inArrayPtr)),
(ulong)2,
129
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
var result = Sse41.Insert(
Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArrayPtr)),
(ulong)2,
129
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
var result = typeof(Sse41).GetMethod(nameof(Sse41.Insert), new Type[] { typeof(Vector128<UInt64>), typeof(UInt64), typeof(byte) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt64>>(_dataTable.inArrayPtr),
(ulong)2,
(byte)129
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt64>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
var result = typeof(Sse41).GetMethod(nameof(Sse41.Insert), new Type[] { typeof(Vector128<UInt64>), typeof(UInt64), typeof(byte) })
.Invoke(null, new object[] {
Sse2.LoadVector128((UInt64*)(_dataTable.inArrayPtr)),
(ulong)2,
(byte)129
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt64>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
var result = typeof(Sse41).GetMethod(nameof(Sse41.Insert), new Type[] { typeof(Vector128<UInt64>), typeof(UInt64), typeof(byte) })
.Invoke(null, new object[] {
Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArrayPtr)),
(ulong)2,
(byte)129
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt64>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
var result = Sse41.Insert(
_clsVar,
(ulong)2,
129
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
var firstOp = Unsafe.Read<Vector128<UInt64>>(_dataTable.inArrayPtr);
var result = Sse41.Insert(firstOp, (ulong)2, 129);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
var firstOp = Sse2.LoadVector128((UInt64*)(_dataTable.inArrayPtr));
var result = Sse41.Insert(firstOp, (ulong)2, 129);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
var firstOp = Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArrayPtr));
var result = Sse41.Insert(firstOp, (ulong)2, 129);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclFldScenario()
{
var test = new SimpleUnaryOpTest__InsertUInt64129();
var result = Sse41.Insert(test._fld, (ulong)2, 129);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunFldScenario()
{
var result = Sse41.Insert(_fld, (ulong)2, 129);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld, _dataTable.outArrayPtr);
}
public void RunUnsupportedScenario()
{
Succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
Succeeded = true;
}
}
private void ValidateResult(Vector128<UInt64> firstOp, void* result, [CallerMemberName] string method = "")
{
UInt64[] inArray = new UInt64[Op1ElementCount];
UInt64[] outArray = new UInt64[RetElementCount];
Unsafe.Write(Unsafe.AsPointer(ref inArray[0]), firstOp);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "")
{
UInt64[] inArray = new UInt64[Op1ElementCount];
UInt64[] outArray = new UInt64[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(UInt64[] firstOp, UInt64[] result, [CallerMemberName] string method = "")
{
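// Presumably, for a 64-bit Insert only the low bit of imm8 selects the destination element,
// so imm8 = 129 (0b1000_0001) targets element 1. The check below therefore expects the
// inserted value 2 in element 1 and the original 0 in element 0.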
for (var i = 0; i < RetElementCount; i++)
{
if ((i == 1 ? result[i] != 2 : result[i] != 0))
{
Succeeded = false;
break;
}
}
if (!Succeeded)
{
Console.WriteLine($"{nameof(Sse41)}.{nameof(Sse41.Insert)}<UInt64>(Vector128<UInt64><9>): {method} failed:");
Console.WriteLine($" firstOp: ({string.Join(", ", firstOp)})");
Console.WriteLine($" result: ({string.Join(", ", result)})");
Console.WriteLine();
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using Xunit;
namespace System.Reflection.Tests
{
public class IsAssignableFromTest
{
[Fact]
public void Test1()
{
VerifyIsAssignableFrom("B&null", typeof(B).GetTypeInfo(), null, false);
}
[Fact]
public void Test2()
{
VerifyIsAssignableFrom("ListArray", typeof(IList<object>).GetTypeInfo(), typeof(object[]).GetTypeInfo(), true);
}
[Fact]
public void Test3()
{
VerifyIsAssignableFrom("ArrayList", typeof(object[]).GetTypeInfo(), typeof(IList<object>).GetTypeInfo(), false);
}
[Fact]
public void Test4()
{
VerifyIsAssignableFrom("B&D", typeof(B).GetTypeInfo(), typeof(D).GetTypeInfo(), true);
}
[Fact]
public void Test5()
{
VerifyIsAssignableFrom("B[]&D[]", typeof(B[]).GetTypeInfo(), typeof(D[]).GetTypeInfo(), true);
}
[Fact]
public void Test6()
{
VerifyIsAssignableFrom("IList<object>&B[]", typeof(IList<object>).GetTypeInfo(), typeof(B[]).GetTypeInfo(), true);
}
[Fact]
public void Test7()
{
VerifyIsAssignableFrom("IList<B>*B[]", typeof(IList<B>).GetTypeInfo(), typeof(B[]).GetTypeInfo(), true);
}
[Fact]
public void Test8()
{
VerifyIsAssignableFrom("IList<B>&D[]", typeof(IList<B>).GetTypeInfo(), typeof(D[]).GetTypeInfo(), true);
}
[Fact]
public void Test9()
{
VerifyIsAssignableFrom("IList<D> & D[]", typeof(IList<D>).GetTypeInfo(), typeof(D[]).GetTypeInfo(), true);
}
[Fact]
public void Test10()
{
VerifyIsAssignableFrom("I<object>&G2<object>", typeof(I<object>).GetTypeInfo(), typeof(G2<object>).GetTypeInfo(), true);
}
[Fact]
public void Test11()
{
VerifyIsAssignableFrom("G<string>&G2<string>", typeof(G<string>).GetTypeInfo(), typeof(G2<string>).GetTypeInfo(), true);
}
[Fact]
public void Test12()
{
VerifyIsAssignableFrom("G<string>&G<string>", typeof(G<string>).GetTypeInfo(), typeof(G<string>).GetTypeInfo(), true);
}
[Fact]
public void Test13()
{
VerifyIsAssignableFrom("G<string>&G<object>", typeof(G<string>).GetTypeInfo(), typeof(G<object>).GetTypeInfo(), false);
}
[Fact]
public void Test14()
{
VerifyIsAssignableFrom("G<object>&G<stgring>", typeof(G<object>).GetTypeInfo(), typeof(G<string>).GetTypeInfo(), false);
}
[Fact]
public void Test15()
{
VerifyIsAssignableFrom("G2<object>&G<object>", typeof(G2<object>).GetTypeInfo(), typeof(G<object>).GetTypeInfo(), false);
}
[Fact]
public void Test16()
{
VerifyIsAssignableFrom("G<string>&I<String>", typeof(G<string>).GetTypeInfo(), typeof(I<string>).GetTypeInfo(), false);
}
[Fact]
public void Test17()
{
VerifyIsAssignableFrom("I2 I2", typeof(I2).GetTypeInfo(), typeof(I2).GetTypeInfo(), true);
}
[Fact]
public void Test18()
{
VerifyIsAssignableFrom("I2 B", typeof(I2).GetTypeInfo(), typeof(B).GetTypeInfo(), true);
}
[Fact]
public void Test19()
{
VerifyIsAssignableFrom("I2 D", typeof(I2).GetTypeInfo(), typeof(D).GetTypeInfo(), true);
}
[Fact]
public void Test20()
{
VerifyIsAssignableFrom("I2 Gen<>", typeof(I2).GetTypeInfo(), typeof(Gen<>).GetTypeInfo(), true);
}
[Fact]
public void Test21()
{
VerifyIsAssignableFrom("I2 Gen<string>", typeof(I2).GetTypeInfo(), typeof(Gen<string>).GetTypeInfo(), true);
}
[Fact]
public void Test22()
{
VerifyIsAssignableFrom("D I1", typeof(D).GetTypeInfo(), typeof(I1).GetTypeInfo(), false);
}
[Fact]
public void Test23()
{
TypeInfo gt = typeof(Gen2<>).GetGenericArguments()[0].GetTypeInfo();
VerifyIsAssignableFrom("I1 Gen2<>.GenericTypeArguments", typeof(I1).GetTypeInfo(), gt, true);
}
[Fact]
public void Test24()
{
TypeInfo gt = typeof(Gen2<>).GetGenericArguments()[0].GetTypeInfo();
}
[Fact]
public void Test25()
{
TypeInfo gt = typeof(Gen2<>).GetGenericArguments()[0].GetTypeInfo();
VerifyIsAssignableFrom("I2 Gen2<>.GenericTypeArguments", typeof(I2).GetTypeInfo(), gt, true);
}
[Fact]
public void Test26()
{
TypeInfo gt = typeof(Gen2<>).GetGenericArguments()[0].GetTypeInfo();
VerifyIsAssignableFrom("Gen<> Gen2<>.GenericTypeArguments", typeof(Gen<>).GetTypeInfo(), gt, false);
}
[Fact]
public void Test27()
{
VerifyIsAssignableFrom("Case500.A Case500.B", typeof(Case500.A).GetTypeInfo(), typeof(Case500.B).GetTypeInfo(), true);
}
[Fact]
public void Test28()
{
VerifyIsAssignableFrom("Case500.A Case500.C", typeof(Case500.A).GetTypeInfo(), typeof(Case500.C).GetTypeInfo(), true);
}
[Fact]
public void Test29()
{
VerifyIsAssignableFrom("Case500.B Case500.C", typeof(Case500.B).GetTypeInfo(), typeof(Case500.C).GetTypeInfo(), true);
}
[Fact]
public void Test30()
{
VerifyIsAssignableFrom("G10<>.GetGenericTypeArguments I1", typeof(G10<>).GetGenericArguments()[0].GetTypeInfo(), typeof(I1).GetTypeInfo(), false);
}
[Fact]
public void Test31()
{
VerifyIsAssignableFrom("G10<>.GetGenericTypeArguments B", typeof(G10<>).GetGenericArguments()[0].GetTypeInfo(), typeof(B).GetTypeInfo(), false);
}
[Fact]
public void Test32()
{
VerifyIsAssignableFrom("I1 G10<>.GetGenericTypeArguments", typeof(I1).GetTypeInfo(), typeof(G10<>).GetGenericArguments()[0].GetTypeInfo(), true);
}
[Fact]
public void Test33()
{
VerifyIsAssignableFrom("B G10<>.GetGenericTypeArguments", typeof(B).GetTypeInfo(), typeof(G10<>).GetGenericArguments()[0].GetTypeInfo(), false);
}
[Fact]
public void Test34()
{
VerifyIsAssignableFrom("I1 Gen2<>.GetGenericArguments", typeof(I1).GetTypeInfo(), typeof(Gen2<>).GetGenericArguments()[0].GetTypeInfo(), true);
}
[Fact]
public void Test35()
{
VerifyIsAssignableFrom("I2 Gen2<>.GetGenericArguments", typeof(I2).GetTypeInfo(), typeof(Gen2<>).GetGenericArguments()[0].GetTypeInfo(), true);
}
// a T[] is assignable to IList<U> iff T[] is assignable to U[]
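// A minimal illustrative sketch of that rule in code form (using the test types below):
//   typeof(I1[]).GetTypeInfo().IsAssignableFrom(typeof(D[]).GetTypeInfo())  // true: D is a reference type implementing I1
//   typeof(I1[]).GetTypeInfo().IsAssignableFrom(typeof(S[]).GetTypeInfo())  // false: S is a value type, so S[] is not I1[]
//   // hence IList<I1> is assignable from D[] but not from S[], as Test36-Test39 verify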
[Fact]
public void Test36()
{
VerifyIsAssignableFrom("I1[] S[]", typeof(I1[]).GetTypeInfo(), typeof(S[]).GetTypeInfo(), false);
}
[Fact]
public void Test37()
{
VerifyIsAssignableFrom("I1[] D[]", typeof(I1[]).GetTypeInfo(), typeof(D[]).GetTypeInfo(), true);
}
[Fact]
public void Test38()
{
VerifyIsAssignableFrom("IList<I1> S[]", typeof(IList<I1>).GetTypeInfo(), typeof(S[]).GetTypeInfo(), false);
}
[Fact]
public void Test39()
{
VerifyIsAssignableFrom("IList<I1> D[]", typeof(IList<I1>).GetTypeInfo(), typeof(D[]).GetTypeInfo(), true);
}
[Fact]
public void Test40()
{
VerifyIsAssignableFrom("int[] uint[]", typeof(int[]).GetTypeInfo(), typeof(uint[]).GetTypeInfo(), true);
}
[Fact]
public void Test41()
{
VerifyIsAssignableFrom("uint[] int[]", typeof(uint[]).GetTypeInfo(), typeof(int[]).GetTypeInfo(), true);
}
[Fact]
public void Test42()
{
VerifyIsAssignableFrom("IList<int> uint[]", typeof(IList<int>).GetTypeInfo(), typeof(uint[]).GetTypeInfo(), true);
}
[Fact]
public void Test43()
{
VerifyIsAssignableFrom("IList<uint> int[]", typeof(IList<uint>).GetTypeInfo(), typeof(int[]).GetTypeInfo(), true);
}
private void VerifyIsAssignableFrom(String testName, TypeInfo left, TypeInfo right, Boolean expected)
{
Boolean actual = left.IsAssignableFrom(right);
Assert.Equal(expected, actual);
}
}
internal interface I1 { }
internal interface I2 { }
internal struct S : I1 { }
internal class B : I1, I2 { }
internal class D : B { }
internal class Gen<T> : D { }
internal class I<T> { }
internal class G<T> : I<T> { }
internal class G2<T> : G<T> { }
internal class Gen2<T> where T : Gen<T>, I1, I2 { }
namespace Case500
{
internal abstract class A { }
internal abstract class B : A { }
internal class C : B { }
}
internal class G10<T> where T : I1 { }
public class TransparentRC : ReflectionContext
{
public override Assembly MapAssembly(Assembly assembly)
{
return assembly;
}
public override TypeInfo MapType(TypeInfo type)
{
return type;
}
}
}
| |
using System.Drawing;
using System.Windows.Forms;
namespace ToolStripCustomizer.ColorTables
{
sealed class OfficeClassicColorTable : PresetColorTable
{
public OfficeClassicColorTable()
: base("Office Classic")
{
}
public override Color ButtonSelectedBorder
{
get
{
return Color.FromArgb(255, 10, 36, 106);
}
}
public override Color ButtonCheckedGradientBegin
{
get
{
return Color.FromArgb(255, 131, 144, 179);
}
}
public override Color ButtonCheckedGradientMiddle
{
get
{
return Color.FromArgb(255, 131, 144, 179);
}
}
public override Color ButtonCheckedGradientEnd
{
get
{
return Color.FromArgb(255, 182, 189, 209);
}
}
public override Color ButtonSelectedGradientBegin
{
get
{
return Color.FromArgb(255, 182, 189, 210);
}
}
public override Color ButtonSelectedGradientMiddle
{
get
{
return Color.FromArgb(255, 182, 189, 210);
}
}
public override Color ButtonSelectedGradientEnd
{
get
{
return Color.FromArgb(255, 182, 189, 210);
}
}
public override Color ButtonPressedGradientBegin
{
get
{
return Color.FromArgb(255, 133, 146, 181);
}
}
public override Color ButtonPressedGradientMiddle
{
get
{
return Color.FromArgb(255, 133, 146, 181);
}
}
public override Color ButtonPressedGradientEnd
{
get
{
return Color.FromArgb(255, 133, 146, 181);
}
}
public override Color CheckBackground
{
get
{
return Color.FromArgb(255, 210, 214, 236);
}
}
public override Color CheckSelectedBackground
{
get
{
return Color.FromArgb(255, 133, 146, 181);
}
}
public override Color CheckPressedBackground
{
get
{
return Color.FromArgb(255, 133, 146, 181);
}
}
public override Color GripDark
{
get
{
return Color.FromArgb(255, 160, 160, 160);
}
}
public override Color GripLight
{
get
{
return Color.FromArgb(255, 255, 255, 255);
}
}
public override Color ImageMarginGradientBegin
{
get
{
return Color.FromArgb(255, 245, 244, 242);
}
}
public override Color ImageMarginGradientMiddle
{
get
{
return Color.FromArgb(255, 234, 232, 228);
}
}
public override Color ImageMarginGradientEnd
{
get
{
return Color.FromArgb(255, 212, 208, 200);
}
}
public override Color ImageMarginRevealedGradientBegin
{
get
{
return Color.FromArgb(255, 238, 236, 233);
}
}
public override Color ImageMarginRevealedGradientMiddle
{
get
{
return Color.FromArgb(255, 225, 222, 217);
}
}
public override Color ImageMarginRevealedGradientEnd
{
get
{
return Color.FromArgb(255, 216, 213, 206);
}
}
public override Color MenuStripGradientBegin
{
get
{
return Color.FromArgb(255, 212, 208, 200);
}
}
public override Color MenuStripGradientEnd
{
get
{
return Color.FromArgb(255, 246, 245, 244);
}
}
public override Color MenuItemSelected
{
get
{
return Color.FromArgb(255, 210, 214, 236);
}
}
public override Color MenuItemBorder
{
get
{
return Color.FromArgb(255, 10, 36, 106);
}
}
public override Color MenuBorder
{
get
{
return Color.FromArgb(255, 102, 102, 102);
}
}
public override Color MenuItemSelectedGradientBegin
{
get
{
return Color.FromArgb(255, 182, 189, 210);
}
}
public override Color MenuItemSelectedGradientEnd
{
get
{
return Color.FromArgb(255, 182, 189, 210);
}
}
public override Color MenuItemPressedGradientBegin
{
get
{
return Color.FromArgb(255, 245, 244, 242);
}
}
public override Color MenuItemPressedGradientMiddle
{
get
{
return Color.FromArgb(255, 225, 222, 217);
}
}
public override Color MenuItemPressedGradientEnd
{
get
{
return Color.FromArgb(255, 234, 232, 228);
}
}
public override Color RaftingContainerGradientBegin
{
get
{
return Color.FromArgb(255, 212, 208, 200);
}
}
public override Color RaftingContainerGradientEnd
{
get
{
return Color.FromArgb(255, 246, 245, 244);
}
}
public override Color SeparatorDark
{
get
{
return Color.FromArgb(255, 166, 166, 166);
}
}
public override Color SeparatorLight
{
get
{
return Color.FromArgb(255, 255, 255, 255);
}
}
public override Color StatusStripGradientBegin
{
get
{
return Color.FromArgb(255, 212, 208, 200);
}
}
public override Color StatusStripGradientEnd
{
get
{
return Color.FromArgb(255, 246, 245, 244);
}
}
public override Color ToolStripBorder
{
get
{
return Color.FromArgb(255, 219, 216, 209);
}
}
public override Color ToolStripDropDownBackground
{
get
{
return Color.FromArgb(255, 249, 248, 247);
}
}
public override Color ToolStripGradientBegin
{
get
{
return Color.FromArgb(255, 245, 244, 242);
}
}
public override Color ToolStripGradientMiddle
{
get
{
return Color.FromArgb(255, 234, 232, 228);
}
}
public override Color ToolStripGradientEnd
{
get
{
return Color.FromArgb(255, 212, 208, 200);
}
}
public override Color ToolStripContentPanelGradientBegin
{
get
{
return Color.FromArgb(255, 212, 208, 200);
}
}
public override Color ToolStripContentPanelGradientEnd
{
get
{
return Color.FromArgb(255, 246, 245, 244);
}
}
public override Color ToolStripPanelGradientBegin
{
get
{
return Color.FromArgb(255, 212, 208, 200);
}
}
public override Color ToolStripPanelGradientEnd
{
get
{
return Color.FromArgb(255, 246, 245, 244);
}
}
public override Color OverflowButtonGradientBegin
{
get
{
return Color.FromArgb(255, 225, 222, 217);
}
}
public override Color OverflowButtonGradientMiddle
{
get
{
return Color.FromArgb(255, 216, 213, 206);
}
}
public override Color OverflowButtonGradientEnd
{
get
{
return Color.FromArgb(255, 128, 128, 128);
}
}
}
}
| |
//
// SubpropertyCollection.cs
//
// Author: Kees van Spelde <[email protected]>
//
// Copyright (c) 2014-2021 Magic-Sessions. (www.magic-sessions.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
using System;
using System.Collections;
using System.Collections.ObjectModel;
namespace VCardReader.Collections
{
/// <summary>
/// A collection of <see cref="Subproperty" /> objects.
/// </summary>
/// <remarks>
/// <para>
/// This class is a general-purpose collection of <see cref="Subproperty" /> objects.
/// </para>
/// <para>
/// A property of a vCard contains a piece of contact information, such as an email address
/// or web site. A subproperty indicates options or attributes of the property, such as the
/// type of email address or character set.
/// </para>
/// </remarks>
/// <seealso cref="Property" />
/// <seealso cref="Subproperty" />
public class SubpropertyCollection : Collection<Subproperty>
{
#region Add
/// <summary>
/// Adds a subproperty without a value.
/// </summary>
/// <param name="name">
/// The name of the subproperty.
/// </param>
public void Add(string name)
{
if (string.IsNullOrEmpty(name))
throw new ArgumentNullException("name");
Add(new Subproperty(name));
}
/// <summary>
/// Adds a subproperty with the specified name and value.
/// </summary>
/// <param name="name">
/// The name of the new subproperty to add.
/// </param>
/// <param name="value">
/// The value of the new subproperty to add. This can be null.
/// </param>
public void Add(string name, string value)
{
Add(new Subproperty(name, value));
}
#endregion
#region AddOrUpdate
/// <summary>
/// Either adds or updates a subproperty with the specified name.
/// </summary>
/// <param name="name">
/// The name of the subproperty to add or update.
/// </param>
/// <param name="value">
/// The value of the subproperty to add or update.
/// </param>
public void AddOrUpdate(string name, string value)
{
if (string.IsNullOrEmpty(name))
throw new ArgumentNullException("name");
var index = IndexOf(name);
if (index == -1)
Add(name, value);
else
this[index].Value = value;
}
#endregion
#region Contains
/// <summary>
/// Determines if the collection contains a subproperty with the specified name.
/// </summary>
/// <param name="name">
/// The name of the subproperty.
/// </param>
/// <returns>
/// True if the collection contains a subproperty with the specified name, or False otherwise.
/// </returns>
public bool Contains(string name)
{
foreach (var subproperty in this)
{
if (string.Compare(name, subproperty.Name, StringComparison.OrdinalIgnoreCase) == 0)
return true;
}
return false;
}
#endregion
#region GetNames
/// <summary>
/// Builds a string array containing subproperty names.
/// </summary>
/// <returns>
/// A string array containing the unmodified name of each subproperty in the collection.
/// </returns>
public string[] GetNames()
{
var names = new ArrayList(Count);
foreach (var subproperty in this)
names.Add(subproperty.Name);
return (string[]) names.ToArray(typeof (string));
}
/// <summary>
/// Builds a string array containing all subproperty
/// names that match one of the names in an array.
/// </summary>
/// <param name="filteredNames">
/// A list of valid subproperty names.
/// </param>
/// <returns>
/// A string array containing the names of all subproperties
/// that match an entry in the filteredNames list.
/// </returns>
public string[] GetNames(string[] filteredNames)
{
if (filteredNames == null)
throw new ArgumentNullException("filteredNames");
// The vCard specification is not case-sensitive.
// Therefore the subproperty names and the filter names
// list must be compared in a case-insensitive manner.
// Whitespace will also be ignored. For better-
// performing comparisons, a processed version of
// the filtered list will be constructed.
var processedNames =
(string[]) filteredNames.Clone();
for (var index = 0; index < processedNames.Length; index++)
{
if (!string.IsNullOrEmpty(processedNames[index]))
processedNames[index] =
processedNames[index].Trim().ToUpperInvariant();
}
// Matching names will be stored in an array list,
// and then converted to a string array for return.
var matchingNames = new ArrayList();
foreach (var subproperty in this)
{
// Convert this subproperty name to upper case.
// The names in the processed array are already
// in upper case.
var subName =
subproperty.Name == null ? null : subproperty.Name.ToUpperInvariant();
// See if the processed subproperty name has any
// matches in the processed array.
var matchIndex =
Array.IndexOf(processedNames, subName);
if (matchIndex != -1)
matchingNames.Add(processedNames[matchIndex]);
}
return (string[]) matchingNames.ToArray(typeof (string));
}
#endregion
#region GetValue
/// <summary>
/// Get the value of the subproperty with the specified name.
/// </summary>
/// <param name="name">
/// The name of the subproperty.
/// </param>
/// <returns>
/// The value of the subproperty or null if no such subproperty exists in the collection.
/// </returns>
public string GetValue(string name)
{
if (string.IsNullOrEmpty(name))
throw new ArgumentNullException("name");
// Get the collection index of the subproperty
// object that has the specified name.
var index = IndexOf(name);
return index == -1 ? null : this[index].Value;
}
/// <summary>
/// Gets the value of the first subproperty with the specified name, or the first value specified in
/// a list.
/// </summary>
/// <param name="name">
/// The expected name of the subproperty.
/// </param>
/// <param name="namelessValues">
/// A list of values that are sometimes listed as subproperty names. The first matching value is
/// returned if the name parameter does not match.
/// </param>
public string GetValue(
string name,
string[] namelessValues)
{
if (string.IsNullOrEmpty(name))
throw new ArgumentNullException("name");
// See if the subproperty exists with the
// specified name. If so, return the value
// immediately.
var index = IndexOf(name);
if (index != -1)
return this[index].Value;
// A subproperty with the specified name does
// not exist. However, this does not mean the
// value is absent: some subproperty
// values can be written directly without a name.
// An example is the ENCODING property. Example:
//
// New Format: KEY;ENCODING=BASE64:....
// Old Format: KEY;BASE64:...
if ((namelessValues == null) || (namelessValues.Length == 0))
return null;
var nameIndex = IndexOfAny(namelessValues);
return nameIndex == -1 ? null : this[nameIndex].Name;
}
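// A minimal illustrative sketch (hypothetical values; the key.Subproperties accessor is assumed
// for illustration): looking up the encoding of a KEY property in either format.
//
//   // New format "KEY;ENCODING=BASE64:..." stores a subproperty named ENCODING with value BASE64;
//   // old format "KEY;BASE64:..." stores a nameless subproperty called BASE64. Both return "BASE64":
//   var encoding = key.Subproperties.GetValue("ENCODING", new[] { "BASE64", "QUOTED-PRINTABLE" });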
#endregion
#region IndexOf
/// <summary>
/// Searches for a subproperty with the specified name.
/// </summary>
/// <param name="name">
/// The name of the subproperty.
/// </param>
/// <returns>
/// The collection (zero-based) index of the first subproperty that matches the specified name.
/// The function returns -1 if no match is found.
/// </returns>
public int IndexOf(string name)
{
for (var index = 0; index < Count; index++)
{
if (string.Compare(name, this[index].Name, StringComparison.OrdinalIgnoreCase) == 0)
{
return index;
}
}
return -1;
}
#endregion
#region IndexOfAny
/// <summary>
/// Finds the first subproperty that has any of the specified names.
/// </summary>
/// <param name="names">
/// An array of names to search.
/// </param>
/// <returns>
/// The collection index of the first subproperty with the specified name, or -1 if no subproperty was found.
/// </returns>
public int IndexOfAny(string[] names)
{
if (names == null)
throw new ArgumentNullException("names");
for (var index = 0; index < Count; index++)
{
foreach (var name in names)
{
if (string.Compare(this[index].Name, name, StringComparison.OrdinalIgnoreCase) == 0)
return index;
}
}
return -1;
}
#endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using System.Windows.Forms;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Diagnostics;
using System.Threading;
using System.Management;
using Microsoft.Win32;
using AEMManager.Util;
namespace AEMManager {
class AemActions {
private static readonly log4net.ILog mLog = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
// utility class only
private AemActions() { }
public static void AddOpenMenuItems(Menu.MenuItemCollection pParent, AemInstance pInstance, bool pSetDefaultItem) {
List<MenuItem> menuItems = new List<MenuItem>();
MenuItem item;
item = new MenuItem();
item.Text = "Open Author/Publish";
item.Click += new EventHandler(OpenAuthorPublish);
if (pSetDefaultItem) {
item.DefaultItem = true;
}
menuItems.Add(item);
item = new MenuItem();
item.Text = "Open CRX";
item.Click += new EventHandler(OpenCRX);
menuItems.Add(item);
item = new MenuItem();
item.Text = "Open CRXDE Lite";
item.Click += new EventHandler(OpenCRXDELite);
menuItems.Add(item);
item = new MenuItem();
item.Text = "Open Felix Console";
item.Click += new EventHandler(OpenFelixConsole);
menuItems.Add(item);
item = new MenuItem();
item.Text = "Open Folder";
item.Click += new EventHandler(OpenFolder);
menuItems.Add(item);
if (pInstance.AemInstanceType == AemInstanceType.AEM54) {
item = new MenuItem();
item.Text = "Open Servlet Engine Admin";
item.Click += new EventHandler(OpenServletEngineAdmin);
menuItems.Add(item);
}
foreach (MenuItem i in menuItems) {
i.Tag = pInstance;
}
pParent.AddRange(menuItems.ToArray());
}
public static void OpenAuthorPublish(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
OpenUrl(instance.UrlWithContextPath, instance);
}
private static void OpenCRX(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
string url = instance.UrlWithContextPath + "/crx/explorer/";
if (instance.AemInstanceType == AemInstanceType.AEM54) {
url = instance.UrlWithoutContextPath + "/crx/";
}
OpenUrl(url, instance);
}
private static void OpenCRXDELite(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
string url = instance.UrlWithContextPath + "/crx/de/";
if (instance.AemInstanceType == AemInstanceType.AEM54) {
url = instance.UrlWithoutContextPath + "/crx/de/";
}
else {
// check if DavEx servlet is enabled before opening CRXDE lite
SlingDavExServlet davEx = new SlingDavExServlet(instance);
davEx.CheckDavExStatus();
}
OpenUrl(url, instance);
}
private static void OpenFelixConsole(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
OpenUrl(instance.UrlWithContextPath + "/system/console", instance);
}
private static void OpenFolder(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
string folder = instance.PathWithoutFilename;
if (!Directory.Exists(folder)) {
return;
}
mLog.Info("Open Folder: " + folder);
System.Diagnostics.Process.Start(folder);
}
private static void OpenServletEngineAdmin(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
OpenUrl(instance.UrlWithoutContextPath + "/admin", instance);
}
private static void OpenUrl(string pUrl, AemInstance pInstance) {
mLog.Info("Open Url: " + pUrl);
System.Diagnostics.Process p = new System.Diagnostics.Process();
if (string.IsNullOrEmpty(pInstance.BrowserExecutable)) {
p.StartInfo.FileName = pUrl;
}
else {
p.StartInfo.FileName = pInstance.BrowserExecutable;
p.StartInfo.Arguments = pUrl;
}
p.Start();
}
public static void AddControlMenuItems(Menu.MenuItemCollection pParent, AemInstance pInstance) {
List<MenuItem> menuItems = new List<MenuItem>();
MenuItem item;
item = new MenuItem();
item.Text = "Start instance";
item.Click += new EventHandler(ControlStartInstance);
menuItems.Add(item);
item = new MenuItem();
item.Text = "Stop instance";
item.Click += new EventHandler(ControlStopInstance);
menuItems.Add(item);
item = new MenuItem();
item.Text = "Kill instance";
item.Click += new EventHandler(ControlKillInstance);
menuItems.Add(item);
foreach (MenuItem i in menuItems) {
i.Tag = pInstance;
}
pParent.AddRange(menuItems.ToArray());
}
private static string BuildCommandLineArguments(AemInstance pInstance) {
List<string> javaArgs = new List<string>();
List<string> jarArgs = new List<string>();
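      // Illustrative result for an author instance (all values hypothetical):
      //   -Xms1024m -Xmx2048m -XX:MaxPermSize=256m -jar aem-quickstart.jar -p 4502 -r author,nosamplecontent -nofork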
// memory settings
javaArgs.Add("-Xms" + pInstance.HeapMinSizeMb + "m");
javaArgs.Add("-Xmx" + pInstance.HeapMaxSizeMb + "m");
javaArgs.Add("-XX:MaxPermSize=" + pInstance.MaxPermSizeMb + "m");
// instance port
if (pInstance.AemInstanceType == AemInstanceType.AEM54) {
javaArgs.Add("-D-crx.quickstart.server.port=" + pInstance.Port);
}
else {
jarArgs.Add("-p " + pInstance.Port);
}
// run modes
string runModes = pInstance.Runmode.ToString().ToLower()
+ "," + (pInstance.RunmodeSampleContent ? "samplecontent" : "nosamplecontent")
+ (!string.IsNullOrEmpty(pInstance.AdditionalRunmodes) ? "," + pInstance.AdditionalRunmodes : "");
if (pInstance.AemInstanceType == AemInstanceType.AEM54) {
javaArgs.Add("-Dsling.run.modes=" + runModes);
}
else {
jarArgs.Add("-r " + runModes);
}
// context path
if (!string.IsNullOrEmpty(pInstance.ContextPath) && pInstance.ContextPath!="/") {
jarArgs.Add("-contextpath " + pInstance.ContextPath);
}
// debug mode
if (pInstance.JVMDebug && (pInstance.DebugPort > 0)) {
javaArgs.Add("-Xdebug -Xnoagent -Djava.compiler=NONE -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=" + pInstance.DebugPort);
}
// JProfile mode
if (pInstance.JProfiler && (pInstance.JProfilerPort > 0)) {
javaArgs.Add("-agentlib:jprofilerti=port=" + pInstance.JProfilerPort + " -Xbootclasspath/a:" + AEMManager.Properties.Settings.Default.JProfilerAgent.Replace("\\", "/"));
}
// JConsole mode
if (pInstance.JConsole && (pInstance.JConsolePort > 0)) {
javaArgs.Add("-Dcom.sun.management.jmxremote.port=" + pInstance.JConsolePort + " -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false");
}
// Hide AEM configuration wizards
if (pInstance.HideConfigWizard) {
javaArgs.Add("-DhideConfigWizard=true");
}
// custom JVM args
if (pInstance.CustomJVMParam1Active) {
javaArgs.Add(pInstance.CustomJVMParam1);
}
if (pInstance.CustomJVMParam2Active) {
javaArgs.Add(pInstance.CustomJVMParam2);
}
if (pInstance.CustomJVMParam3Active) {
javaArgs.Add(pInstance.CustomJVMParam3);
}
// configure AEM start mode
if (pInstance.ShowInstanceWindow) {
jarArgs.Add("-v");
}
if (!pInstance.OpenBrowser) {
jarArgs.Add("-nobrowser");
}
// suppress forking for AEM55 and above, because otherwise debugging will not work
if (pInstance.AemInstanceType != AemInstanceType.AEM54) {
jarArgs.Add("-nofork");
}
// add jar command line args
javaArgs.Add("-jar " + pInstance.PathFilename + " " + String.Join(" ", jarArgs.ToArray()));
// build complete string
return String.Join(" ", javaArgs.ToArray());
}
private static void ControlStartInstance(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
if (instance.CurrentBundleStatus == BundleStatus.UNKNOWN
|| instance.CurrentBundleStatus == BundleStatus.STARTING_STOPPING
|| instance.CurrentBundleStatus == BundleStatus.RUNNING) {
if (MessageBox.Show("The instance seems to be running already.\n"
+ "Press OK to continue starting the instance, it may fail.",
"Start Instance", MessageBoxButtons.OKCancel, MessageBoxIcon.Exclamation)==DialogResult.Cancel) {
return;
}
}
string executable = "cmd";
string aemInstanceArguments = BuildCommandLineArguments(instance);
string arguments = "/c \"" + instance.JavaExecutable + "\" " + aemInstanceArguments;
      // add the JProfiler path to the current application's Path environment variable to make sure the spawned process gets it as well
if (instance.JProfiler && (instance.JProfilerPort > 0)) {
string path = System.Environment.GetEnvironmentVariable("Path");
if (!path.Contains(AEMManager.Properties.Settings.Default.JProfilerPath)) {
path += ";" + AEMManager.Properties.Settings.Default.JProfilerPath;
System.Environment.SetEnvironmentVariable("Path", path);
}
}
bool isConsoleOutputWindow = instance.ConsoleOutputWindow.Visible;
if (!isConsoleOutputWindow) {
        // show and hide the console window again when it is not already shown - to prevent a deadlock that occurred sometimes when stopping instances (DINT-349)
instance.ConsoleOutputWindow.Show();
}
instance.ConsoleOutputWindow.InitStartProcess(instance.PathWithoutFilename, instance.JavaExecutable, aemInstanceArguments);
instance.JavaProcess = ExecuteCommand(instance.PathWithoutFilename, executable, arguments, instance.Name, instance.ShowInstanceWindow, "aem.ico", false, instance);
instance.JavaProcessVisible = instance.ShowInstanceWindow;
if (!isConsoleOutputWindow) {
instance.ConsoleOutputWindow.Hide();
}
}
private static void ControlStopInstance(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
StopInstance(instance);
}
public static void StopInstance(AemInstance pInstance) {
string shutdownUrl = pInstance.UrlWithContextPath + "/system/console/vmstat?shutdown_type=Stop";
if (pInstance.AemInstanceType == AemInstanceType.AEM54) {
shutdownUrl = pInstance.UrlWithoutContextPath + "/admin/shutdown";
}
try {
mLog.Debug("Execute shutdown URL: " + shutdownUrl);
pInstance.ConsoleOutputWindow.AppendConsoleLog("Shutting down instance...");
HttpWebRequest request = pInstance.WebRequestCreate(shutdownUrl);
request.Method = "POST";
request.GetResponse();
}
catch (WebException ex) {
mLog.Debug("Unable to connect to " + shutdownUrl + ": " + ex.Message);
}
catch (Exception ex) {
mLog.Error("Error executing shutdown URL: " + shutdownUrl, ex);
}
}
private static void ControlKillInstance(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
Process process = instance.GetInstanceJavaProcess();
if (process != null) {
KillProcessAndChildrens(process.Id, instance);
}
}
private static void KillProcessAndChildrens(int pid, AemInstance instance) {
ManagementObjectSearcher processSearcher = new ManagementObjectSearcher
("Select * From Win32_Process Where ParentProcessID=" + pid);
ManagementObjectCollection processCollection = processSearcher.Get();
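      // The child-process snapshot is taken before the parent is killed, so the
      // ParentProcessID lookup still succeeds once the parent has exited.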
try {
Process proc = Process.GetProcessById(pid);
if (proc != null && !proc.HasExited) {
mLog.Debug("Killing process #" + pid + " for instance " + instance.Name);
proc.Kill();
}
}
catch (ArgumentException) {
// Process already exited.
}
if (processCollection != null) {
foreach (ManagementObject mo in processCollection) {
          // kill child processes (also kills children of children, etc.)
KillProcessAndChildrens(Convert.ToInt32(mo["ProcessID"]), instance);
}
}
}
private static Process ExecuteCommand(string pWorkDir, string pExecutable, string pArguments, string pProcessName,
bool pShowInstanceWindow, string pIconFile, bool pProcessNameViaShortcut, AemInstance pAemInstance) {
mLog.Info("Execute: WorkDir=" + pWorkDir + ", executable=" + pExecutable + ", arguments=" + pArguments);
Process process;
// execute via auto-generated shortcut
if (pShowInstanceWindow && pProcessNameViaShortcut) {
string shortcutFilename = Path.GetTempPath() + pProcessName + ".lnk";
IWshRuntimeLibrary.WshShell wshShell = new IWshRuntimeLibrary.WshShellClass();
IWshRuntimeLibrary.IWshShortcut shortcut = (IWshRuntimeLibrary.IWshShortcut)wshShell.CreateShortcut(shortcutFilename);
if (!string.IsNullOrEmpty(pWorkDir)) {
shortcut.WorkingDirectory = pWorkDir;
}
try {
shortcut.TargetPath = pExecutable;
}
catch (ArgumentException) {
MessageBox.Show("Executable not found: " + pExecutable, "Execute Command", MessageBoxButtons.OK, MessageBoxIcon.Warning);
return null;
}
shortcut.Arguments = pArguments;
shortcut.Description = pProcessName;
if (!string.IsNullOrEmpty(pIconFile)) {
shortcut.IconLocation = Path.GetDirectoryName(Application.ExecutablePath) + "\\icons\\" + pIconFile;
}
shortcut.Save();
process = new Process();
process.StartInfo.FileName = shortcutFilename;
}
// start directly
else {
process = new Process();
if (!string.IsNullOrEmpty(pWorkDir)) {
process.StartInfo.WorkingDirectory = pWorkDir;
}
process.StartInfo.FileName = pExecutable;
process.StartInfo.Arguments = pArguments;
}
if (!pShowInstanceWindow) {
process.StartInfo.WindowStyle = ProcessWindowStyle.Hidden;
}
if (pProcessNameViaShortcut) {
        // use ShellExecute when starting via shortcut - this prevents output stream redirection etc.
process.StartInfo.UseShellExecute = true;
}
else {
        // directly start the process if no shortcut is used.
process.StartInfo.UseShellExecute = false;
process.StartInfo.CreateNoWindow = !pShowInstanceWindow;
if (pAemInstance != null && !pShowInstanceWindow) {
// use output handling if AEM instance is available
process.OutputDataReceived += new DataReceivedEventHandler(pAemInstance.ConsoleOutputWindow.Process_OutputDataReceived);
process.StartInfo.RedirectStandardOutput = true;
process.ErrorDataReceived += new DataReceivedEventHandler(pAemInstance.ConsoleOutputWindow.Process_ErrorDataReceived);
process.StartInfo.RedirectStandardError = true;
}
}
try {
process.Start();
if (!pProcessNameViaShortcut) {
if (pAemInstance != null && !pShowInstanceWindow) {
process.BeginOutputReadLine();
process.BeginErrorReadLine();
}
}
return process;
}
catch (Exception ex) {
throw new Exception(ex.Message + "\n"
+ "WorkDir: " + pWorkDir + "\n"
+ "Executable: " + pExecutable + "\n"
+ "Arguments: " + pArguments, ex);
}
}
public static void AddLogMenuItems(Menu.MenuItemCollection pParent, AemInstance pInstance) {
List<MenuItem> menuItems = new List<MenuItem>();
MenuItem item;
// show dynamic list of current logfiles
item = new MenuItem();
item.Text = "Open logfile...";
item.Popup += LogFilesItem_Popup;
item.MenuItems.Add(new MenuItem("-- No logfiles --"));
menuItems.Add(item);
item = new MenuItem();
item.Text = "Console window";
item.Click += new EventHandler(ShowConsoleWindow);
menuItems.Add(item);
foreach (MenuItem i in menuItems) {
i.Tag = pInstance;
}
pParent.AddRange(menuItems.ToArray());
}
private static void LogFilesItem_Popup(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
MenuItem logFilesItem = (MenuItem)sender;
logFilesItem.MenuItems.Clear();
string logsPath = instance.PathWithoutFilename + @"\crx-quickstart\logs";
if (Directory.Exists(logsPath)) {
string[] logFiles = Directory.GetFiles(logsPath);
if (logFiles.Length > 0) {
foreach (string logFilePath in logFiles) {
string logFile = logFilePath.Substring(logFilePath.LastIndexOf(@"\") + 1);
            // skip rotated logfiles with suffixes like ".2016-07-19", "-2016-07-12.log" or "-4108.log"
if (Regex.Match(logFile, @"^.*\.\d+\-\d+\-\d+$").Success
|| Regex.Match(logFile, @"^.*\-\d+(\-\d+\-\d+)?\.log$").Success) {
continue;
}
MenuItem item = new MenuItem();
item.Text = logFile;
item.Click += new EventHandler(OpenLogFile);
item.Tag = instance;
logFilesItem.MenuItems.Add(item);
}
}
}
if (logFilesItem.MenuItems.Count == 0) {
logFilesItem.MenuItems.Add(new MenuItem("-- No logfiles --"));
}
}
private static void OpenLogFile(object sender, EventArgs e) {
MenuItem item = (MenuItem)sender;
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
OpenLogViewer(instance.PathWithoutFilename + @"\crx-quickstart\logs\" + item.Text, instance.Name + " - " + item.Text);
}
private static void ShowConsoleWindow(object sender, EventArgs e) {
AemInstance instance = Program.GetActionInstance(sender);
if (instance == null) {
return;
}
instance.ConsoleOutputWindow.Show();
}
private static void OpenLogViewer(string pLogFile, string pProcessName) {
if (!File.Exists(pLogFile)) {
MessageBox.Show("File does not exist:\n" + pLogFile, "File not found", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
return;
}
RegistryKey preferencesKey = RegistryUtil.GetUserKey("Preferences");
string logViewer = (string)preferencesKey.GetValue("LogViewer", "notepad.exe");
string executable = logViewer;
string arguments = "\"" + pLogFile + "\"";
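      // The LogViewer preference may contain extra arguments after the executable
      // (illustrative: "notepad++.exe -nosession"); split on the first space and
      // prepend those arguments before the quoted logfile path.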
int pos = executable.IndexOf(" ");
if (pos>=0) {
arguments = executable.Substring(pos + 1) + " " + arguments;
executable = executable.Substring(0, pos);
}
string workdir = null;
try {
workdir = Path.GetDirectoryName(executable);
}
catch (Exception) {
// ignore
}
ExecuteCommand(workdir, executable, arguments, pProcessName, true, "log.ico", AEMManager.Properties.Settings.Default.ProcessNameViaShortcut, null);
}
public static BundleStatus GetCombinedBundleStatus(AemInstance pInstance) {
if (pInstance == null) {
return BundleStatus.NO_ACTIVE_INSTANCE;
}
// check if process is running
if (!pInstance.RemoteProcess) {
Process process = pInstance.GetInstanceJavaProcess();
if (process == null || process.HasExited) {
return BundleStatus.DISABLED;
}
}
// get bundle status
BundleStatus bundleStatus = BundleStatus.UNKNOWN;
string bundleListUrl = pInstance.UrlWithContextPath + "/system/console/bundles/.json";
Stopwatch responseTimeStopwatch = new Stopwatch();
try {
mLog.Debug("Get bundle list from URL: " + bundleListUrl);
HttpWebRequest request = pInstance.WebRequestCreate(bundleListUrl);
request.Method = "GET";
request.Timeout = AEMManager.Properties.Settings.Default.BundleListTimeout;
request.ReadWriteTimeout = AEMManager.Properties.Settings.Default.BundleListTimeout;
responseTimeStopwatch.Start();
using (WebResponse response = request.GetResponse()) {
responseTimeStopwatch.Stop();
String responseText;
using (StreamReader streamReader = new StreamReader(response.GetResponseStream())) {
responseText = streamReader.ReadToEnd();
}
// parse JSON
bool success = false;
object value = JSON.JsonDecode(responseText, ref success);
if (success) {
bundleStatus = GetCombinedBundleStatus(value, responseTimeStopwatch.ElapsedMilliseconds);
}
else {
mLog.Warn("Parsing JSON response failed: " + responseText);
}
}
}
catch (WebException ex) {
if (ex.Status == WebExceptionStatus.Timeout) {
mLog.Debug("Unable to connect to " + bundleListUrl + " due to timeout. "
+ "Configured timeout: " + AEMManager.Properties.Settings.Default.BundleListTimeout + "ms, "
+ "measured response time: " + responseTimeStopwatch.ElapsedMilliseconds + "ms");
}
else {
mLog.Debug("Unable to connect to " + bundleListUrl + ": " + ex.Message);
bundleStatus = BundleStatus.UNKNOWN;
}
}
catch (Exception ex) {
mLog.Error("Error getting bundle list from URL: " + bundleListUrl, ex);
bundleStatus = BundleStatus.UNKNOWN;
}
return bundleStatus;
}
    private static BundleStatus GetCombinedBundleStatus(object pJsonObject, long pResponseTime) {
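      // Expected JSON shape as consumed below (illustrative):
      //   { "status": "...", "data": [ { "state": "Active" }, { "state": "Fragment" }, ... ] }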
BundleStatus bundleStatus = BundleStatus.RUNNING;
Hashtable root = GetHashtable(pJsonObject);
string status = (string)root["status"];
ArrayList data = GetArrayList(root["data"]);
foreach (object dataItem in data) {
Hashtable bundle = GetHashtable(dataItem);
string state = (string)bundle["state"];
if (!string.IsNullOrEmpty(state)) {
if (!(state.Equals("Active") || state.Equals("Fragment"))) {
bundleStatus = BundleStatus.STARTING_STOPPING;
break;
}
}
}
mLog.Debug("Status: " + status + ", result: " + bundleStatus + " (response time: " + pRepsonseTime + "ms)");
return bundleStatus;
}
private static Hashtable GetHashtable(object pJsonObject) {
if (pJsonObject is Hashtable) {
return (Hashtable)pJsonObject;
}
else {
return new Hashtable();
}
}
private static ArrayList GetArrayList(object pJsonObject) {
if (pJsonObject is ArrayList) {
return (ArrayList)pJsonObject;
}
else {
return new ArrayList();
}
}
}
}
| |
namespace CustomersSampleService.Migrations
{
using System;
using System.Collections.Generic;
using System.Data.Entity.Infrastructure.Annotations;
using System.Data.Entity.Migrations;
public partial class V0 : DbMigration
{
public override void Up()
{
CreateTable(
"customerssample.Customers",
c => new
{
Id = c.String(nullable: false, maxLength: 128,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Id")
},
}),
Name = c.String(),
Version = c.Binary(nullable: false, fixedLength: true, timestamp: true, storeType: "rowversion",
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Version")
},
}),
CreatedAt = c.DateTimeOffset(nullable: false, precision: 7,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "CreatedAt")
},
}),
UpdatedAt = c.DateTimeOffset(precision: 7,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "UpdatedAt")
},
}),
Deleted = c.Boolean(nullable: false,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Deleted")
},
}),
})
.PrimaryKey(t => t.Id)
.Index(t => t.CreatedAt, clustered: true);
CreateTable(
"customerssample.Orders",
c => new
{
Id = c.String(nullable: false, maxLength: 128,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Id")
},
}),
Item = c.String(),
Quantity = c.Int(nullable: false),
Completed = c.Boolean(nullable: false),
CustomerId = c.String(nullable: false, maxLength: 128),
Version = c.Binary(nullable: false, fixedLength: true, timestamp: true, storeType: "rowversion",
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Version")
},
}),
CreatedAt = c.DateTimeOffset(nullable: false, precision: 7,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "CreatedAt")
},
}),
UpdatedAt = c.DateTimeOffset(precision: 7,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "UpdatedAt")
},
}),
Deleted = c.Boolean(nullable: false,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Deleted")
},
}),
})
.PrimaryKey(t => t.Id)
.ForeignKey("customerssample.Customers", t => t.CustomerId)
.Index(t => t.CustomerId)
.Index(t => t.CreatedAt, clustered: true);
}
public override void Down()
{
DropForeignKey("customerssample.Orders", "CustomerId", "customerssample.Customers");
DropIndex("customerssample.Orders", new[] { "CreatedAt" });
DropIndex("customerssample.Orders", new[] { "CustomerId" });
DropIndex("customerssample.Customers", new[] { "CreatedAt" });
DropTable("customerssample.Orders",
removedColumnAnnotations: new Dictionary<string, IDictionary<string, object>>
{
{
"CreatedAt",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "CreatedAt" },
}
},
{
"Deleted",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Deleted" },
}
},
{
"Id",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Id" },
}
},
{
"UpdatedAt",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "UpdatedAt" },
}
},
{
"Version",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Version" },
}
},
});
DropTable("customerssample.Customers",
removedColumnAnnotations: new Dictionary<string, IDictionary<string, object>>
{
{
"CreatedAt",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "CreatedAt" },
}
},
{
"Deleted",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Deleted" },
}
},
{
"Id",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Id" },
}
},
{
"UpdatedAt",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "UpdatedAt" },
}
},
{
"Version",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Version" },
}
},
});
}
}
}
| |
#region License
// Copyright (c) Jeremy Skinner (http://www.jeremyskinner.co.uk)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// The latest version of this file can be found at https://github.com/jeremyskinner/FluentValidation
#endregion
namespace FluentValidation.Tests {
using System;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Threading;
using Internal;
using Xunit;
using Resources;
using Validators;
public class LocalisedMessagesTester : IDisposable {
public LocalisedMessagesTester() {
// ensure the resource provider is reset after any tests that use it.
CultureScope.SetDefaultCulture();
}
public void Dispose()
{
CultureScope.SetDefaultCulture();
}
#if !CoreCLR
[Fact]
public void Correctly_assigns_default_localized_error_message() {
var originalCulture = Thread.CurrentThread.CurrentUICulture;
try {
var validator = new TestValidator(v => v.RuleFor(x => x.Surname).NotEmpty());
foreach (var culture in new[] { "en", "de", "fr", "es", "de", "it", "nl", "pl", "pt", "ru", "sv" }) {
Thread.CurrentThread.CurrentUICulture = new CultureInfo(culture);
var message = Messages.ResourceManager.GetString("notempty_error");
var errorMessage = new MessageFormatter().AppendPropertyName("Surname").BuildMessage(message);
Debug.WriteLine(errorMessage);
var result = validator.Validate(new Person{Surname = null});
result.Errors.Single().ErrorMessage.ShouldEqual(errorMessage);
}
}
finally {
// Always reset the culture.
Thread.CurrentThread.CurrentUICulture = originalCulture;
}
}
#endif
[Fact]
		public void ResourceProviderType_overrides_default_messages() {
ValidatorOptions.ResourceProviderType = typeof(MyResources);
var validator = new TestValidator() {
v => v.RuleFor(x => x.Surname).NotEmpty()
};
var result = validator.Validate(new Person());
result.Errors.Single().ErrorMessage.ShouldEqual("foo");
}
[Fact]
public void Sets_localised_message_via_expression() {
var validator = new TestValidator();
validator.RuleFor(x => x.Surname).NotEmpty().WithLocalizedMessage(() => MyResources.notempty_error);
var result = validator.Validate(new Person());
result.Errors.Single().ErrorMessage.ShouldEqual("foo");
}
[Fact]
public void Sets_localised_message_via_type_name() {
var validator = new TestValidator();
validator.RuleFor(x => x.Surname).NotEmpty().WithLocalizedMessage(typeof(MyResources), nameof(MyResources.notempty_error));
var result = validator.Validate(new Person());
result.Errors.Single().ErrorMessage.ShouldEqual("foo");
}
[Fact]
public void When_using_explicitly_localized_message_does_not_fall_back_to_ResourceProvider_with_expression() {
ValidatorOptions.ResourceProviderType = typeof(MyResources);
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).NotEmpty().WithLocalizedMessage(() => MyOverridenResources.notempty_error)
};
var results = validator.Validate(new Person());
results.Errors.Single().ErrorMessage.ShouldEqual("bar");
}
[Fact]
public void When_using_explicitly_localized_message_does_not_fall_back_to_ResourceProvider_with_type()
{
ValidatorOptions.ResourceProviderType = typeof(MyResources);
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).NotEmpty().WithLocalizedMessage(typeof(MyOverridenResources), nameof(MyOverridenResources.notempty_error))
};
var results = validator.Validate(new Person());
results.Errors.Single().ErrorMessage.ShouldEqual("bar");
}
[Fact]
public void Custom_property_validators_should_respect_ResourceProvider() {
ValidatorOptions.ResourceProviderType = typeof(MyResources);
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).SetValidator(new MyPropertyValidator())
};
var results = validator.Validate(new Person());
results.Errors.Single().ErrorMessage.ShouldEqual("foo");
}
[Fact]
public void When_using_explicitly_localized_message_with_custom_validator_does_not_fall_back_to_ResourceProvider_expression() {
ValidatorOptions.ResourceProviderType = typeof(MyResources);
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).SetValidator(new MyPropertyValidator())
.WithLocalizedMessage(() => MyOverridenResources.notempty_error)
};
var results = validator.Validate(new Person());
results.Errors.Single().ErrorMessage.ShouldEqual("bar");
}
[Fact]
public void When_using_explicitly_localized_message_with_custom_validator_does_not_fall_back_to_ResourceProvider()
{
ValidatorOptions.ResourceProviderType = typeof(MyResources);
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).SetValidator(new MyPropertyValidator())
.WithLocalizedMessage(typeof(MyOverridenResources), nameof(MyOverridenResources.notempty_error))
};
var results = validator.Validate(new Person());
results.Errors.Single().ErrorMessage.ShouldEqual("bar");
}
[Fact]
public void Can_use_placeholders_with_localized_messages_expression() {
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).NotNull().WithLocalizedMessage(() => TestMessages.PlaceholderMessage, 1)
};
var result = validator.Validate(new Person());
result.Errors.Single().ErrorMessage.ShouldEqual("Test 1");
}
[Fact]
public void Can_use_placeholders_with_localized_messages()
{
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).NotNull().WithLocalizedMessage(typeof(TestMessages), nameof(TestMessages.PlaceholderMessage), 1)
};
var result = validator.Validate(new Person());
result.Errors.Single().ErrorMessage.ShouldEqual("Test 1");
}
[Fact]
public void Can_use_placeholders_with_localized_messages_using_expressions_when_resource_is_expression() {
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).NotNull().WithLocalizedMessage(() => TestMessages.PlaceholderMessage, x => 1)
};
var result = validator.Validate(new Person());
result.Errors.Single().ErrorMessage.ShouldEqual("Test 1");
}
[Fact]
public void Can_use_placeholders_with_localized_messages_using_expressions()
{
var validator = new TestValidator {
v => v.RuleFor(x => x.Surname).NotNull().WithLocalizedMessage(typeof(TestMessages), nameof(TestMessages.PlaceholderMessage), x => 1)
};
var result = validator.Validate(new Person());
result.Errors.Single().ErrorMessage.ShouldEqual("Test 1");
}
[Fact]
		public void Setting_global_resource_provider_propagates_to_metadata() {
ValidatorOptions.ResourceProviderType = typeof(TestMessages);
var validator = new TestValidator();
validator.RuleFor(x => x.Forename).NotNull();
var descriptor = validator.CreateDescriptor();
var resourceType = descriptor.GetMembersWithValidators().First().First().ErrorMessageSource.ResourceType;
Assert.Equal(typeof (TestMessages), resourceType);
}
[Fact]
public void Not_Setting_global_resource_provider_uses_default_messages_in_metadata()
{
var validator = new TestValidator();
validator.RuleFor(x => x.Forename).NotNull();
var descriptor = validator.CreateDescriptor();
var resourceType = descriptor.GetMembersWithValidators().First().First().ErrorMessageSource.ResourceType;
Assert.Equal(typeof(Messages), resourceType);
}
[Fact]
public void Uses_func_to_get_message() {
var validator = new TestValidator();
validator.RuleFor(x => x.Forename).NotNull().WithMessage(x => "el foo");
var result = validator.Validate(new Person());
result.Errors[0].ErrorMessage.ShouldEqual("el foo");
}
private class MyResources {
public static string notempty_error {
get { return "foo"; }
}
}
private class MyOverridenResources {
public static string notempty_error {
get { return "bar"; }
}
}
private class MyPropertyValidator : PropertyValidator {
public MyPropertyValidator() : base(nameof(MyOverridenResources.notempty_error), typeof(MyOverridenResources)) {
}
protected override bool IsValid(PropertyValidatorContext context) {
return false;
}
}
}
}
| |
/***************************************************************************
* ActionManager.cs
*
* Copyright (C) 2005-2006 Novell, Inc.
* Written by Aaron Bockover <[email protected]>
****************************************************************************/
/* THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW:
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections;
using Gtk;
using Mono.Unix;
namespace Banshee.Base
{
public class ActionManager : IEnumerable
{
private UIManager ui = new UIManager();
private ActionGroup global_actions = new ActionGroup("Global");
private ActionGroup playlist_actions = new ActionGroup("Playlist");
private ActionGroup song_actions = new ActionGroup("Song");
private ActionGroup audio_cd_actions = new ActionGroup("AudioCD");
private ActionGroup playback_actions = new ActionGroup("Playback");
private ActionGroup playback_seek_actions = new ActionGroup("PlaybackSeek");
private ActionGroup dap_actions = new ActionGroup("Dap");
public ActionManager()
{
PopulateActionGroups();
}
#if !win32
public void LoadInterface()
{
ui.AddUiFromResource("UIManagerLayout.xml");
Gtk.Window.DefaultIconName = "music-player-banshee";
}
#endif
private void PopulateActionGroups()
{
/* Global Actions */
global_actions.Add(new ActionEntry [] {
new ActionEntry("MusicMenuAction", null,
Catalog.GetString("_Music"), null, null, null),
new ActionEntry("NewPlaylistAction", Stock.New,
Catalog.GetString("_New Playlist"), "<control>N",
Catalog.GetString("Create a new empty playlist"), null),
new ActionEntry("ImportFolderAction", Stock.Open,
Catalog.GetString("Import _Folder..."), "<control>O",
Catalog.GetString("Import the contents of an entire folder"), null),
new ActionEntry("ImportFilesAction", Stock.Open,
Catalog.GetString("Import Files..."), null,
Catalog.GetString("Import files inside a folder"), null),
new ActionEntry("ImportMusicAction", Stock.Open,
Catalog.GetString("Import _Music..."), "<control>I",
Catalog.GetString("Import music from a variety of sources"), null),
new ActionEntry("OpenLocationAction", null,
Catalog.GetString("Open _Location..."), "<control>L",
Catalog.GetString("Open a remote location for playback"), null),
new ActionEntry("WriteCDAction", null,
Catalog.GetString("Write CD"), null,
Catalog.GetString("Write selection to audio CD"), null),
new ActionEntry("ImportSourceAction", null,
Catalog.GetString("Import Source"), null,
Catalog.GetString("Import source to library"), null),
new ActionEntry("SelectedSourcePropertiesAction", Stock.Properties,
"Source Properties", null,
null, null),
new ActionEntry("ScriptsAction", null,
Catalog.GetString("User Scripts"), null,
Catalog.GetString("Run available user scripts"), null),
new ActionEntry("QuitAction", Stock.Quit,
Catalog.GetString("_Quit"), "<control>Q",
Catalog.GetString("Quit Banshee"), null),
new ActionEntry("EditMenuAction", null,
Catalog.GetString("_Edit"), null, null, null),
new ActionEntry("RenameSourceAction", "gtk-edit",
"Rename", "F2",
"Rename", null),
new ActionEntry("UnmapSourceAction", Stock.Delete,
"Unmap", "<shift>Delete",
null, null),
new ActionEntry("SelectAllAction", null,
Catalog.GetString("Select _All"), "<control>A",
Catalog.GetString("Select all songs in song list"), null),
new ActionEntry("SelectNoneAction", null,
Catalog.GetString("Select _None"), "<control><shift>A",
Catalog.GetString("Unselect all songs in song list"), null),
new ActionEntry("JumpToPlayingAction", null,
Catalog.GetString("_Jump to playing song"), "<control>J",
null, null),
new ActionEntry("PluginsAction", null,
Catalog.GetString("Plu_gins..."), null,
Catalog.GetString("Configure Banshee plugins"), null),
new ActionEntry("PreferencesAction", Stock.Preferences, null),
new ActionEntry("ToolsMenuAction", null,
Catalog.GetString("_Tools"), null, null, null),
new ActionEntry("ViewMenuAction", null,
Catalog.GetString("_View"), null, null, null),
new ActionEntry("ColumnsAction", null,
Catalog.GetString("_Columns..."), null,
Catalog.GetString("Select which columns to display in the song list"), null),
new ActionEntry("ShellAction", null,
Catalog.GetString("_Boo Buddy..."), "<control><shift>S",
Catalog.GetString("Open Boo Buddy"), delegate {
BooBuddy.BooBuddyWindow boo_buddy = new BooBuddy.BooBuddyWindow();
boo_buddy.Show();
}),
new ActionEntry("ShowEqualizerAction", null,
Catalog.GetString("Equalizer"), null,
Catalog.GetString("Display the equalizer."), null),
new ActionEntry("LoggedEventsAction", null,
Catalog.GetString("_Logged Events Viewer..."), null,
Catalog.GetString("View a detailed log of events"), null),
new ActionEntry("HelpMenuAction", null,
Catalog.GetString("_Help"), null, null, null),
new ActionEntry("VersionInformationAction", null,
Catalog.GetString("_Version Information..."), null,
Catalog.GetString("View detailed version and configuration information"), null),
new ActionEntry("WebMenuAction", null,
Catalog.GetString("_Web Resources"), null, null, null),
new ActionEntry("WikiGuideAction", Stock.Help,
Catalog.GetString("Banshee _User Guide (Wiki)"), null,
Catalog.GetString("Learn about how to use Banshee"), delegate {
Banshee.Web.Browser.Open("http://banshee-project.org/Guide");
}),
new ActionEntry("WikiAction", null,
Catalog.GetString("Banshee _Home Page"), null,
Catalog.GetString("Visit the Banshee Home Page"), delegate {
Banshee.Web.Browser.Open("http://banshee-project.org/");
}),
new ActionEntry("WikiDeveloperAction", null,
Catalog.GetString("_Get Involved"), null,
Catalog.GetString("Become a contributor to Banshee"), delegate {
Banshee.Web.Browser.Open("http://banshee-project.org/Developers");
}),
new ActionEntry("AboutAction", "gtk-about", null),
new ActionEntry("PlaybackMenuAction", null,
Catalog.GetString("_Playback"), null, null, null),
new ActionEntry("SourceMenuAction", null,
Catalog.GetString("Source"), null, null, null),
new ActionEntry("SongViewPopupAction", null,
Catalog.GetString("Song Menu"), null, null, null),
new ActionEntry("DebugMenuAction", null,
Catalog.GetString("_Debug"), null, null, null)
});
global_actions.Add(new ToggleActionEntry [] {
new ToggleActionEntry("FullScreenAction", null,
Catalog.GetString("_Fullscreen"), "F11",
Catalog.GetString("Toggle Fullscreen Mode"), null, false),
new ToggleActionEntry("ShowCoverArtAction", null,
Catalog.GetString("Show Cover _Art"), null,
Catalog.GetString("Toggle display of album cover art"), null, false),
});
global_actions.GetAction("ShowEqualizerAction").Visible = false;
ui.InsertActionGroup(global_actions, 0);
/* Song Selected Actions */
song_actions.Add(new ActionEntry [] {
new ActionEntry("CopySongsAction", Stock.Copy,
Catalog.GetString("_Copy"), "<Control>C",
Catalog.GetString("Copy selected song(s) to clipboard"), null),
new ActionEntry("RemoveSongsAction", Stock.Remove,
Catalog.GetString("_Remove"), "Delete",
Catalog.GetString("Remove selected song(s) from library"), null),
new ActionEntry("DeleteSongsFromDriveAction", null,
Catalog.GetString("_Delete From Drive"), null,
Catalog.GetString("Permanently delete selected song(s) from storage medium"), null),
new ActionEntry("PropertiesAction", Stock.Edit,
Catalog.GetString("_Edit Song Metadata"), null,
Catalog.GetString("Edit metadata on selected songs"), null),
new ActionEntry("SearchMenuAction", Stock.Find,
Catalog.GetString("_Search for songs"), null,
Catalog.GetString("Search for songs matching certain criteria"), null),
new ActionEntry("SearchForSameAlbumAction", null,
Catalog.GetString("By matching _album"), null,
Catalog.GetString("Search all songs of this album"), null),
new ActionEntry("SearchForSameArtistAction", null,
Catalog.GetString("By matching a_rtist"), null,
Catalog.GetString("Search all songs of this artist"), null),
new ActionEntry("SearchForSameGenreAction", null,
Catalog.GetString("By matching _genre"), null,
Catalog.GetString("Search all songs of this genre"), null),
new ActionEntry("AddToPlaylistAction", null,
Catalog.GetString("Add _to Playlist"), null,
Catalog.GetString("Append selected songs to playlist or create new playlist from selection"), null),
new ActionEntry("RatingAction", null,
Catalog.GetString("Ratin_g"), null,
Catalog.GetString("Set rating for selected songs"), null)
});
ui.InsertActionGroup(song_actions, 0);
/* Audio CD Selected Actions */
audio_cd_actions.Add(new ActionEntry [] {
new ActionEntry("ImportCDAction", null,
Catalog.GetString("Import CD"), null,
Catalog.GetString("Import audio CD to library"), null)
});
ui.InsertActionGroup(audio_cd_actions, 0);
/* Playback Actions */
playback_actions.Add(new ActionEntry [] {
new ActionEntry("PlayPauseAction", "media-playback-start",
Catalog.GetString("_Play"), "space",
Catalog.GetString("Play or pause the current song"), null),
new ActionEntry("NextAction", "media-skip-forward",
Catalog.GetString("_Next"), "N",
Catalog.GetString("Play the next song"), null),
new ActionEntry("PreviousAction", "media-skip-backward",
Catalog.GetString("Pre_vious"), "B",
Catalog.GetString("Play the previous song"), null),
});
playback_actions.Add(new RadioActionEntry [] {
new RadioActionEntry("RepeatNoneAction", null,
Catalog.GetString("Repeat N_one"), null,
Catalog.GetString("Do not repeat playlist"), 0),
new RadioActionEntry("RepeatAllAction", null,
Catalog.GetString("Repeat _All"), null,
Catalog.GetString("Play all songs before repeating playlist"), 1),
new RadioActionEntry("RepeatSingleAction", null,
Catalog.GetString("Repeat Si_ngle"), null,
Catalog.GetString("Repeat the current playing song"), 2)
}, 0, null);
playback_actions.Add(new ToggleActionEntry [] {
new ToggleActionEntry("ShuffleAction", "media-playlist-shuffle",
Catalog.GetString("Shu_ffle"), null,
Catalog.GetString("Toggle between shuffle or continuous playback modes"), null, false),
new ToggleActionEntry("StopWhenFinishedAction", null,
Catalog.GetString("_Stop when finished"), "<Shift>space",
Catalog.GetString("Stop playback after the current song finishes playing"), null, false)
});
ui.InsertActionGroup(playback_actions, 0);
/* Playback Seeking Actions */
playback_seek_actions.Add(new ActionEntry [] {
new ActionEntry("SeekBackwardAction", "media-seek-backward",
Catalog.GetString("Seek _Backward"), "<control>Left",
Catalog.GetString("Seek backward in current song"), null),
new ActionEntry("SeekForwardAction", "media-seek-forward",
Catalog.GetString("Seek _Forward"), "<control>Right",
Catalog.GetString("Seek forward in current song"), null),
new ActionEntry("SeekToAction", null,
Catalog.GetString("Seek _To..."), "T",
Catalog.GetString("Seek to a specific location in current song"), null),
new ActionEntry("RestartSongAction", null,
Catalog.GetString("_Restart Song"), "R",
Catalog.GetString("Restart the current song"), null)
});
ui.InsertActionGroup(playback_seek_actions, 0);
/* DAP Actions */
dap_actions.Add(new ActionEntry [] {
new ActionEntry("SyncDapAction", null,
Catalog.GetString("Synchronize"), null,
Catalog.GetString("Save changes to device or synchronize music library"), null)
});
ui.InsertActionGroup(dap_actions, 0);
this["DebugMenuAction"].Visible = Globals.ArgumentQueue.Contains("debug");
this["ShellAction"].Visible = Globals.ArgumentQueue.Contains("debug");
}
public Action FindActionByName(string actionName)
{
foreach(ActionGroup group in ui.ActionGroups) {
foreach(Action action in group.ListActions()) {
if(action.Name == actionName) {
return action;
}
}
}
return null;
}
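        // Lookup works either by action name (e.g. "PlayPauseAction") or, failing that, by a
        // UIManager widget path resolved through ui.GetAction (path form is illustrative only).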
public Action this [string widgetPathOrActionName] {
get {
Action action = FindActionByName(widgetPathOrActionName);
if(action == null) {
return ui.GetAction(widgetPathOrActionName);
}
return action;
}
}
public Widget GetWidget(string widgetPath)
{
return ui.GetWidget(widgetPath);
}
public void SetActionLabel(string actionName, string label)
{
this[actionName].Label = label;
Banshee.Widgets.ActionButton.SyncButtons();
}
public void SetActionIcon(string actionName, string icon)
{
this[actionName].StockId = icon;
Banshee.Widgets.ActionButton.SyncButtons();
}
public void UpdateAction(string actionName, string label, string icon)
{
Action action = this[actionName];
action.Label = label;
action.StockId = icon;
Banshee.Widgets.ActionButton.SyncButtons();
}
public IEnumerator GetEnumerator()
{
foreach(ActionGroup group in ui.ActionGroups) {
foreach(Action action in group.ListActions()) {
yield return action;
}
}
}
public UIManager UI {
get { return ui; }
}
public ActionGroup GlobalActions {
get { return global_actions; }
}
public ActionGroup PlaylistActions {
get { return playlist_actions; }
}
public ActionGroup SongActions {
get { return song_actions; }
}
public ActionGroup AudioCdActions {
get { return audio_cd_actions; }
}
public ActionGroup PlaybackActions {
get { return playback_actions; }
}
public ActionGroup PlaybackSeekActions {
get { return playback_seek_actions; }
}
public ActionGroup DapActions {
get { return dap_actions; }
}
}
}
| |
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Commons.Music.Midi;
using osu.Framework.Input.StateChanges;
using osu.Framework.Logging;
using osu.Framework.Platform;
using osu.Framework.Statistics;
using osu.Framework.Threading;
namespace osu.Framework.Input.Handlers.Midi
{
public class MidiHandler : InputHandler
{
public override string Description => "MIDI";
public override bool IsActive => active;
private bool active = true;
private ScheduledDelegate scheduledRefreshDevices;
private readonly Dictionary<string, IMidiInput> openedDevices = new Dictionary<string, IMidiInput>();
/// <summary>
/// The last event for each midi device. This is required for Running Status (repeat messages sent without
/// event type).
/// </summary>
private readonly Dictionary<string, byte> runningStatus = new Dictionary<string, byte>();
public override bool Initialize(GameHost host)
{
if (!base.Initialize(host))
return false;
Enabled.BindValueChanged(e =>
{
if (e.NewValue)
{
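                    // poll for connected/disconnected MIDI devices every 500 ms while the handler is enabled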
host.InputThread.Scheduler.Add(scheduledRefreshDevices = new ScheduledDelegate(() => refreshDevices(), 0, 500));
}
else
{
scheduledRefreshDevices?.Cancel();
lock (openedDevices)
{
foreach (var device in openedDevices.Values)
closeDevice(device);
openedDevices.Clear();
}
}
}, true);
return refreshDevices();
}
private bool refreshDevices()
{
try
{
var inputs = MidiAccessManager.Default.Inputs.ToList();
lock (openedDevices)
{
// check removed devices
foreach (string key in openedDevices.Keys.ToArray())
{
var device = openedDevices[key];
if (inputs.All(i => i.Id != key))
{
closeDevice(device);
openedDevices.Remove(key);
Logger.Log($"Disconnected MIDI device: {device.Details.Name}");
}
}
// check added devices
foreach (IMidiPortDetails input in inputs)
{
if (openedDevices.All(x => x.Key != input.Id))
{
var newInput = MidiAccessManager.Default.OpenInputAsync(input.Id).Result;
newInput.MessageReceived += onMidiMessageReceived;
openedDevices[input.Id] = newInput;
Logger.Log($"Connected MIDI device: {newInput.Details.Name}");
}
}
}
return true;
}
catch (Exception e)
{
Logger.Error(e, RuntimeInfo.OS == RuntimeInfo.Platform.Linux
? "Couldn't list input devices. Is libasound2-dev installed?"
: "Couldn't list input devices. There may be another application already using MIDI.");
active = false;
return false;
}
}
private void closeDevice(IMidiInput device)
{
device.MessageReceived -= onMidiMessageReceived;
// some devices may take some time to close, so this should be fire-and-forget.
// the internal implementations look to have their own (eventual) timeout logic.
Task.Factory.StartNew(() => device.CloseAsync(), TaskCreationOptions.LongRunning);
}
private void onMidiMessageReceived(object sender, MidiReceivedEventArgs e)
{
Debug.Assert(sender is IMidiInput);
var senderId = ((IMidiInput)sender).Details.Id;
try
{
for (int i = e.Start; i < e.Length;)
{
readEvent(e.Data, senderId, ref i, out byte eventType, out byte key, out byte velocity);
dispatchEvent(eventType, key, velocity);
}
}
catch (Exception exception)
{
var dataString = string.Join("-", e.Data.Select(b => b.ToString("X2")));
Logger.Error(exception, $"An exception occurred while reading MIDI data from sender {senderId}: {dataString}");
}
}
/// <remarks>
/// This function is not intended to provide complete correctness of MIDI parsing.
/// For now the goal is to correctly parse "note start" and "note end" events and correctly delimit all events.
/// </remarks>
private void readEvent(byte[] data, string senderId, ref int i, out byte eventType, out byte key, out byte velocity)
{
byte statusType = data[i++];
// continuation messages:
// need running status to be interpreted correctly
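            // (illustrative: after "0x90 0x3C 0x40" a device may send just "0x3E 0x40",
            //  which must be read as another note-on using the stored 0x90 status byte)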
if (statusType <= 0x7F)
{
if (!runningStatus.ContainsKey(senderId))
throw new InvalidDataException($"Received running status of sender {senderId}, but no event type was stored");
eventType = runningStatus[senderId];
key = statusType;
velocity = data[i++];
return;
}
// real-time messages:
// 0 additional data bytes always, do not reset running status
if (statusType >= 0xF8)
{
eventType = statusType;
key = velocity = 0;
return;
}
// system common messages:
// variable number of additional data bytes, reset running status
if (statusType >= 0xF0)
{
eventType = statusType;
// system exclusive message
// vendor-specific, terminated by 0xF7
// ignoring their whole contents for now since we can't do anything with them anyway
if (statusType == 0xF0)
{
while (data[i - 1] != 0xF7)
i++;
key = velocity = 0;
}
// other common system messages
// fixed size given by MidiEvent.FixedDataSize
else
{
key = MidiEvent.FixedDataSize(statusType) >= 1 ? data[i++] : (byte)0;
velocity = MidiEvent.FixedDataSize(statusType) == 2 ? data[i++] : (byte)0;
}
runningStatus.Remove(senderId);
return;
}
// channel messages
// fixed size (varying per event type), set running status
eventType = statusType;
key = MidiEvent.FixedDataSize(statusType) >= 1 ? data[i++] : (byte)0;
velocity = MidiEvent.FixedDataSize(statusType) == 2 ? data[i++] : (byte)0;
runningStatus[senderId] = eventType;
}
private void dispatchEvent(byte eventType, byte key, byte velocity)
{
Logger.Log($"Handling MIDI event {eventType:X2}:{key:X2}:{velocity:X2}");
// Low nibble only contains channel data in note on/off messages
// Ignore to receive messages from all channels
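            // (e.g. 0x91, a note-on on channel 2, masks to 0x90 == MidiEvent.NoteOn)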
switch (eventType & 0xF0)
{
case MidiEvent.NoteOn when velocity != 0:
Logger.Log($"NoteOn: {(MidiKey)key}/{velocity / 128f:P}");
PendingInputs.Enqueue(new MidiKeyInput((MidiKey)key, velocity, true));
FrameStatistics.Increment(StatisticsCounterType.MidiEvents);
break;
case MidiEvent.NoteOff:
case MidiEvent.NoteOn when velocity == 0:
Logger.Log($"NoteOff: {(MidiKey)key}/{velocity / 128f:P}");
PendingInputs.Enqueue(new MidiKeyInput((MidiKey)key, 0, false));
FrameStatistics.Increment(StatisticsCounterType.MidiEvents);
break;
}
}
}
}
| |
// Copyright Naked Objects Group Ltd, 45 Station Road, Henley on Thames, UK, RG9 1AT
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and limitations under the License.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.ComponentModel.DataAnnotations;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using NakedFramework;
using NakedFramework.Architecture.Component;
using NakedFramework.Architecture.Facet;
using NakedFramework.Architecture.Reflect;
using NakedFramework.Architecture.SpecImmutable;
using NakedFramework.Metamodel.Facet;
using NakedObjects.Reflector.FacetFactory;
// ReSharper disable UnusedMember.Global
// ReSharper disable UnusedMember.Local
namespace NakedObjects.Reflector.Test.FacetFactory;
[TestClass]
public class HiddenAnnotationFacetFactoryTest : AbstractFacetFactoryTest {
private HiddenAnnotationFacetFactory facetFactory;
protected override Type[] SupportedTypes => new[] { typeof(IHiddenFacet) };
protected override IFacetFactory FacetFactory => facetFactory;
[TestMethod]
public void TestDisabledWhenUntilPersistedAnnotationPickedUpOn() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var actionMethod = FindMethod(typeof(Customer6), "SomeAction");
metamodel = facetFactory.Process(Reflector, actionMethod, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
var hiddenFacetAbstract = (HiddenFacet)facet;
Assert.AreEqual(WhenTo.UntilPersisted, hiddenFacetAbstract.Value);
Assert.IsNotNull(metamodel);
}
[TestMethod]
public override void TestFeatureTypes() {
var featureTypes = facetFactory.FeatureTypes;
Assert.IsFalse(featureTypes.HasFlag(FeatureType.Objects));
Assert.IsTrue(featureTypes.HasFlag(FeatureType.Properties));
Assert.IsTrue(featureTypes.HasFlag(FeatureType.Collections));
Assert.IsTrue(featureTypes.HasFlag(FeatureType.Actions));
Assert.IsFalse(featureTypes.HasFlag(FeatureType.ActionParameters));
}
[TestMethod]
public void TestHiddenAnnotationPickedUpOnAction() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var actionMethod = FindMethod(typeof(Customer2), "SomeAction");
metamodel = facetFactory.Process(Reflector, actionMethod, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
Assert.IsNotNull(facet);
Assert.IsTrue(facet is HiddenFacet);
AssertNoMethodsRemoved();
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestHiddenAnnotationPickedUpOnCollection() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var property = FindProperty(typeof(Customer1), "Orders");
metamodel = facetFactory.Process(Reflector, property, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
Assert.IsNotNull(facet);
Assert.IsTrue(facet is HiddenFacet);
AssertNoMethodsRemoved();
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestHiddenAnnotationPickedUpOnProperty() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var property = FindProperty(typeof(Customer), "NumberOfOrders");
metamodel = facetFactory.Process(Reflector, property, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
Assert.IsNotNull(facet);
Assert.IsTrue(facet is HiddenFacet);
AssertNoMethodsRemoved();
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestHiddenWhenAlwaysAnnotationPickedUpOn() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var actionMethod = FindMethod(typeof(Customer3), "SomeAction");
metamodel = facetFactory.Process(Reflector, actionMethod, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
var hiddenFacetAbstract = (HiddenFacet)facet;
Assert.AreEqual(WhenTo.Always, hiddenFacetAbstract.Value);
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestHiddenWhenNeverAnnotationPickedUpOn() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var actionMethod = FindMethod(typeof(Customer4), "SomeAction");
metamodel = facetFactory.Process(Reflector, actionMethod, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
var hiddenFacetAbstract = (HiddenFacet)facet;
Assert.AreEqual(WhenTo.Never, hiddenFacetAbstract.Value);
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestHiddenWhenOncePersistedAnnotationPickedUpOn() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var actionMethod = FindMethod(typeof(Customer5), "SomeAction");
metamodel = facetFactory.Process(Reflector, actionMethod, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
var hiddenFacetAbstract = (HiddenFacet)facet;
Assert.AreEqual(WhenTo.OncePersisted, hiddenFacetAbstract.Value);
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestHiddenPriorityOverScaffoldAnnotation() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var property = FindProperty(typeof(Customer10), "NumberOfOrders");
metamodel = facetFactory.Process(Reflector, property, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
var hiddenFacetAbstract = (HiddenFacet)facet;
Assert.AreEqual(WhenTo.Always, hiddenFacetAbstract.Value);
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestScaffoldAnnotationPickedUpOnCollection() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var property = FindProperty(typeof(Customer8), "Orders");
metamodel = facetFactory.Process(Reflector, property, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
Assert.IsNotNull(facet);
Assert.IsTrue(facet is HiddenFacet);
AssertNoMethodsRemoved();
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestScaffoldAnnotationPickedUpOnProperty() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var property = FindProperty(typeof(Customer7), "NumberOfOrders");
metamodel = facetFactory.Process(Reflector, property, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
Assert.IsNotNull(facet);
Assert.IsTrue(facet is HiddenFacet);
AssertNoMethodsRemoved();
Assert.IsNotNull(metamodel);
}
[TestMethod]
public void TestScaffoldTrueAnnotationPickedUpOn() {
IImmutableDictionary<string, ITypeSpecBuilder> metamodel = new Dictionary<string, ITypeSpecBuilder>().ToImmutableDictionary();
var property = FindProperty(typeof(Customer9), "NumberOfOrders");
metamodel = facetFactory.Process(Reflector, property, MethodRemover, Specification, metamodel);
var facet = Specification.GetFacet(typeof(IHiddenFacet));
var hiddenFacetAbstract = (HiddenFacet)facet;
Assert.AreEqual(WhenTo.Never, hiddenFacetAbstract.Value);
Assert.IsNotNull(metamodel);
}
#region Nested type: Customer
private class Customer {
[Hidden(WhenTo.Always)]
public int NumberOfOrders => 0;
}
#endregion
#region Nested type: Customer1
private class Customer1 {
[Hidden(WhenTo.Always)]
public IList Orders => null;
}
#endregion
#region Nested type: Customer10
private class Customer10 {
[Hidden(WhenTo.Always)]
[ScaffoldColumn(true)]
public int NumberOfOrders => 0;
}
#endregion
#region Nested type: Customer2
private class Customer2 {
[Hidden(WhenTo.Always)]
public void SomeAction() { }
}
#endregion
#region Nested type: Customer3
private class Customer3 {
[Hidden(WhenTo.Always)]
public void SomeAction() { }
}
#endregion
#region Nested type: Customer4
private class Customer4 {
[Hidden(WhenTo.Never)]
public void SomeAction() { }
}
#endregion
#region Nested type: Customer5
private class Customer5 {
[Hidden(WhenTo.OncePersisted)]
public void SomeAction() { }
}
#endregion
#region Nested type: Customer6
private class Customer6 {
[Hidden(WhenTo.UntilPersisted)]
public void SomeAction() { }
}
#endregion
#region Nested type: Customer7
private class Customer7 {
[ScaffoldColumn(false)]
public int NumberOfOrders => 0;
}
#endregion
#region Nested type: Customer8
private class Customer8 {
[ScaffoldColumn(false)]
public IList Orders => null;
}
#endregion
#region Nested type: Customer9
private class Customer9 {
[ScaffoldColumn(true)]
public int NumberOfOrders => 0;
}
#endregion
#region Setup/Teardown
[TestInitialize]
public override void SetUp() {
base.SetUp();
facetFactory = new HiddenAnnotationFacetFactory(GetOrder<HiddenAnnotationFacetFactory>(), LoggerFactory);
}
[TestCleanup]
public override void TearDown() {
facetFactory = null;
base.TearDown();
}
#endregion
}
// Copyright (c) Naked Objects Group Ltd.
// ReSharper restore UnusedMember.Local
| |
/* Copyright (C) 2008-2018 Peter Palotas, Jeffrey Jangli, Alexandr Normuradov
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
using System;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Security;
using System.Text;
namespace Alphaleonis.Win32.Filesystem
{
public static partial class Path
{
internal static void CheckInvalidUncPath(string path)
{
// Tackle: Path.GetFullPath(@"\\\\.txt"), but exclude "." which is the current directory.
if (!IsLongPath(path) && path.StartsWith(UncPrefix, StringComparison.Ordinal))
{
var tackle = GetRegularPathCore(path, GetFullPathOptions.None, false).TrimStart(DirectorySeparatorChar, AltDirectorySeparatorChar);
if (tackle.Length >= 2 && tackle[0] == CurrentDirectoryPrefixChar)
throw new ArgumentException(Resources.UNC_Path_Should_Match_Format, "path");
}
}
/// <summary>Checks that the given path format is supported.</summary>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <exception cref="NotSupportedException"/>
/// <param name="path">A path to the file or directory.</param>
/// <param name="checkInvalidPathChars">Checks that the path contains only valid path-characters.</param>
/// <param name="checkAdditional">.</param>
internal static void CheckSupportedPathFormat(string path, bool checkInvalidPathChars, bool checkAdditional)
{
// "."
if (Utils.IsNullOrWhiteSpace(path) || path.Length == 1)
return;
var regularPath = GetRegularPathCore(path, GetFullPathOptions.None, false);
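// A leading volume separator (":") is malformed; a volume separator appearing after the drive specifier (e.g. "C:\dir:stream") is unsupported.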
var isArgumentException = regularPath[0] == VolumeSeparatorChar;
var throwException = isArgumentException || regularPath.Length >= 2 && regularPath.IndexOf(VolumeSeparatorChar, 2) != -1;
if (throwException)
{
if (isArgumentException)
throw new ArgumentException(string.Format(CultureInfo.InvariantCulture, Resources.Unsupported_Path_Format, regularPath), "path");
throw new NotSupportedException(string.Format(CultureInfo.InvariantCulture, Resources.Unsupported_Path_Format, regularPath));
}
if (checkInvalidPathChars)
CheckInvalidPathChars(path, checkAdditional, false);
}
/// <summary>Checks that the path contains only valid path-characters.</summary>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <param name="path">A path to the file or directory.</param>
/// <param name="checkAdditional"><c>true</c> also checks for ? and * characters.</param>
/// <param name="allowEmpty">When <c>false</c>, throws an <see cref="ArgumentException"/>.</param>
[SecurityCritical]
private static void CheckInvalidPathChars(string path, bool checkAdditional, bool allowEmpty)
{
if (null == path)
throw new ArgumentNullException("path");
if (!allowEmpty && (path.Trim().Length == 0 || Utils.IsNullOrWhiteSpace(path)))
throw new ArgumentException(Resources.Path_Is_Zero_Length_Or_Only_White_Space, "path");
// Will fail on a Unicode path.
var pathRp = GetRegularPathCore(path, GetFullPathOptions.None, allowEmpty);
// Handle "\\?\GlobalRoot\" and "\\?\Volume" prefixes.
if (pathRp.StartsWith(GlobalRootPrefix, StringComparison.OrdinalIgnoreCase))
pathRp = pathRp.ReplaceIgnoreCase(GlobalRootPrefix, string.Empty);
if (pathRp.StartsWith(VolumePrefix, StringComparison.OrdinalIgnoreCase))
pathRp = pathRp.ReplaceIgnoreCase(VolumePrefix, string.Empty);
for (int index = 0, l = pathRp.Length; index < l; ++index)
{
int num = pathRp[index];
switch (num)
{
case 34: // " (quote)
case 60: // < (less than)
case 62: // > (greater than)
case 124: // | (pipe)
throw new ArgumentException(string.Format(CultureInfo.InvariantCulture, Resources.Illegal_Characters_In_Path, (char) num), "path");
default:
// 32: space
if (num >= 32 && (!checkAdditional || num != WildcardQuestionChar && num != WildcardStarMatchAllChar))
continue;
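// Control characters (below 32) and, when checkAdditional is set, wildcard characters are rejected via the same exception as the explicit cases above.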
goto case 34;
}
}
}
[SecurityCritical]
internal static string GetCleanExceptionPath(string path)
{
return GetRegularPathCore(path, GetFullPathOptions.None, true).TrimEnd(DirectorySeparatorChar, WildcardStarMatchAllChar);
}
/// <summary>Gets the path as a long full path.</summary>
/// <returns>The path as an extended length path.</returns>
/// <exception cref="ArgumentException"/>
/// <exception cref="ArgumentNullException"/>
/// <param name="transaction">The transaction.</param>
/// <param name="path">The path to convert.</param>
/// <param name="pathFormat">The path format to use.</param>
/// <param name="options">Options for controlling the operation. Note that on .NET 3.5 the TrimEnd option has no effect.</param>
[SecurityCritical]
internal static string GetExtendedLengthPathCore(KernelTransaction transaction, string path, PathFormat pathFormat, GetFullPathOptions options)
{
if (null == path)
return null;
switch (pathFormat)
{
case PathFormat.LongFullPath:
if (options != GetFullPathOptions.None)
{
// If pathFormat equals LongFullPath it is possible that the trailing backslash ('\') is not added or removed.
// Prevent that.
options &= ~GetFullPathOptions.CheckAdditional;
options &= ~GetFullPathOptions.CheckInvalidPathChars;
options &= ~GetFullPathOptions.FullCheck;
options &= ~GetFullPathOptions.TrimEnd;
path = ApplyFullPathOptions(path, options);
}
return path;
case PathFormat.FullPath:
return GetLongPathCore(path, GetFullPathOptions.None);
case PathFormat.RelativePath:
#if NET35
// .NET 3.5 the TrimEnd option has no effect.
options = options & ~GetFullPathOptions.TrimEnd;
#endif
return GetFullPathCore(transaction, false, path, GetFullPathOptions.AsLongPath | options);
default:
throw new ArgumentException("Invalid value: " + pathFormat, "pathFormat");
}
}
[SecurityCritical]
internal static int GetRootLength(string path, bool checkInvalidPathChars)
{
if (checkInvalidPathChars)
CheckInvalidPathChars(path, false, false);
var index = 0;
var length = path.Length;
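// A rooted path starts with a directory separator; a second leading separator indicates a UNC path,
// whose root extends through "\\server\share". Otherwise "X:" or "X:\" forms a drive root.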
if (length >= 1 && IsDVsc(path[0], false))
{
index = 1;
if (length >= 2 && IsDVsc(path[1], false))
{
index = 2;
var num = 2;
while (index < length && (!IsDVsc(path[index], false) || --num > 0))
++index;
}
}
else if (length >= 2 && IsDVsc(path[1], true))
{
index = 2;
if (length >= 3 && IsDVsc(path[2], false))
++index;
}
return index;
}
/// <summary>Check if <paramref name="c"/> is a directory- and/or volume-separator character.</summary>
/// <returns><c>true</c> if <paramref name="c"/> is a separator character.</returns>
/// <param name="c">The character to check.</param>
/// <param name="checkSeparatorChar">
/// If <c>null</c>, checks for all separator characters: <see cref="DirectorySeparatorChar"/>,
/// <see cref="AltDirectorySeparatorChar"/> and <see cref="VolumeSeparatorChar"/>
/// If <c>false</c>, only checks for: <see cref="DirectorySeparatorChar"/> and <see cref="AltDirectorySeparatorChar"/>
/// If <c>true</c> only checks for: <see cref="VolumeSeparatorChar"/>
/// </param>
[SecurityCritical]
internal static bool IsDVsc(char c, bool? checkSeparatorChar)
{
return checkSeparatorChar == null
// Check for all separator characters.
? c == DirectorySeparatorChar || c == AltDirectorySeparatorChar || c == VolumeSeparatorChar
// Check for some separator characters.
: ((bool) checkSeparatorChar
? c == VolumeSeparatorChar
: c == DirectorySeparatorChar || c == AltDirectorySeparatorChar);
}
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
[SuppressMessage("Microsoft.Performance", "CA1809:AvoidExcessiveLocals")]
private static string NormalizePath(string path, GetFullPathOptions options)
{
var newBuffer = new StringBuilder(NativeMethods.MaxPathUnicode);
var index = 0;
uint numSpaces = 0;
uint numDots = 0;
var fixupDirectorySeparator = false;
// Number of significant chars other than potentially suppressible dots and spaces since the last directory or volume separator char.
uint numSigChars = 0;
// Index of last significant character.
var lastSigChar = -1;
// Whether this segment of the path (not the complete path) started with a volume separator char. Reject "c:...".
var startedWithVolumeSeparator = false;
var firstSegment = true;
var lastDirectorySeparatorPos = 0;
// LEGACY: This code is here for backwards compatibility reasons.
// It ensures that "\\foo.cs\bar.cs" stays "\\foo.cs\bar.cs" instead of being turned into "\foo.cs\bar.cs".
if (path.Trim().Length > 0 && (path[0] == DirectorySeparatorChar || path[0] == AltDirectorySeparatorChar))
{
newBuffer.Append(DirectorySeparatorChar);
index++;
lastSigChar = 0;
}
// Normalize the string, stripping out redundant dots, spaces, and slashes.
while (index < path.Length)
{
var currentChar = path[index];
// We handle both directory separators and dots specially. For
// directory separators, we consume consecutive appearances.
// For dots, we consume all dots beyond the second in
// succession. All other characters are added as is. In
// addition we consume all spaces after the last other char
// in a directory name up until the directory separator.
if (currentChar == DirectorySeparatorChar || currentChar == AltDirectorySeparatorChar)
{
// If we have a path like "123.../foo", remove the trailing dots.
// However, if we found "c:\temp\..\bar" or "c:\temp\...\bar", do not.
// Also remove trailing spaces from both files & directory names.
// This was agreed on with the OS team to fix undeletable directory
// names ending in spaces.
// If we saw a '\' as the previous last significant character and
// are simply going to write out dots, suppress them.
// If we only contain dots and slashes though, only allow
// a string like [dot]+ [space]*. Ignore everything else.
// Legal: "\.. \", "\...\", "\. \"
// Illegal: "\.. .\", "\. .\", "\ .\"
if (numSigChars == 0)
{
// Dot and space handling.
if (numDots > 0)
{
newBuffer.Append(NormalizePathDotSpaceHandler(path, lastSigChar, numDots, startedWithVolumeSeparator));
fixupDirectorySeparator = false;
// Continue in this case, potentially writing out '\'.
}
if (numSpaces > 0 && firstSegment)
{
// Handle strings like " \\server\share".
if (index + 1 < path.Length && (path[index + 1] == DirectorySeparatorChar || path[index + 1] == AltDirectorySeparatorChar))
newBuffer.Append(DirectorySeparatorChar);
}
}
numDots = 0;
numSpaces = 0; // Suppress trailing spaces
if (!fixupDirectorySeparator)
{
fixupDirectorySeparator = true;
newBuffer.Append(DirectorySeparatorChar);
}
numSigChars = 0;
lastSigChar = index;
startedWithVolumeSeparator = false;
firstSegment = false;
var thisPos = newBuffer.Length - 1;
if (thisPos - lastDirectorySeparatorPos - 1 > NativeMethods.MaxDirectoryLength)
throw new PathTooLongException(path);
lastDirectorySeparatorPos = thisPos;
} // if (Found directory separator)
else
switch (currentChar)
{
case CurrentDirectoryPrefixChar:
// Reduce multiple dots to at most 2 dots, but only when they follow a slash.
// For instance "a...b" is a valid file name.
numDots++;
// Don't flush out non-terminal spaces here, because they may in
// the end not be significant. Turn "c:\ . .\foo" -> "c:\foo"
// which is the conclusion of removing trailing dots & spaces,
// as well as folding multiple '\' characters.
break;
case ' ':
numSpaces++;
break;
default: // Normal character logic
fixupDirectorySeparator = false;
// To reject strings like "C:...\foo" and "C :\foo"
if (firstSegment && currentChar == VolumeSeparatorChar)
{
// Only accept "C:", not "c :" or ":"
// Get a drive letter or ' ' if index is 0.
var driveLetter = index > 0 ? path[index - 1] : ' ';
var validPath = numDots == 0 && numSigChars >= 1 && driveLetter != ' ';
if (!validPath)
throw new ArgumentException(path, "path");
startedWithVolumeSeparator = true;
// We need special logic to make " c:" work, we should not fix paths like " foo::$DATA"
if (numSigChars > 1)
{
// Common case, simply do nothing.
var spaceCount = 0; // How many spaces did we write out, numSpaces has already been reset.
while (spaceCount < newBuffer.Length && newBuffer[spaceCount] == ' ')
spaceCount++;
if (numSigChars - spaceCount == 1)
{
newBuffer.Length = 0;
newBuffer.Append(driveLetter);
// Overwrite spaces, we need a special case to not break " foo" as a relative path.
}
}
numSigChars = 0;
}
else
numSigChars += 1 + numDots + numSpaces;
// Copy any spaces & dots since the last significant character to here.
// Note we only counted the number of dots & spaces, and don't know what order they're in. Hence the copy.
if (numDots > 0 || numSpaces > 0)
{
var numCharsToCopy = lastSigChar >= 0 ? index - lastSigChar - 1 : index;
if (numCharsToCopy > 0)
{
for (var i = 0; i < numCharsToCopy; i++)
newBuffer.Append(path[lastSigChar + 1 + i]);
}
numDots = 0;
numSpaces = 0;
}
newBuffer.Append(currentChar);
lastSigChar = index;
break;
}
index++;
}
if (newBuffer.Length - 1 - lastDirectorySeparatorPos > NativeMethods.MaxDirectoryLength)
throw new PathTooLongException(path);
// Drop any trailing dots and spaces from file & directory names, EXCEPT we MUST make sure that "C:\foo\.." is correctly handled.
// Also handle "C:\foo\." -> "C:\foo", while "C:\." -> "C:\"
if (numSigChars == 0)
{
// Dot and space handling.
if (numDots > 0)
newBuffer.Append(NormalizePathDotSpaceHandler(path, lastSigChar, numDots, startedWithVolumeSeparator));
}
// If we ended up eating all the characters, bail out.
if (newBuffer.Length == 0)
throw new ArgumentException(path, "path");
// Disallow URL's here. Some of our other Win32 API calls will reject them later, so we might be better off rejecting them here.
// Note we've probably turned them into "file:\D:\foo.tmp" by now.
// But for compatibility, ensure that callers that aren't doing a full check aren't rejected here.
if ((options & GetFullPathOptions.FullCheck) != 0)
{
var newBufferString = newBuffer.ToString();
if (newBufferString.StartsWith(Uri.UriSchemeHttp + ":", StringComparison.OrdinalIgnoreCase) ||
newBufferString.StartsWith(Uri.UriSchemeFile + ":", StringComparison.OrdinalIgnoreCase))
throw new ArgumentException(path, "path");
}
// No Win32 canonicalization is performed here; validate the normalized buffer directly.
// Throw an ArgumentException for paths like \\, \\server, \\server\
// This check can only be properly done after normalizing, so \\foo\.. will be properly rejected.
// Also, reject \\?\GLOBALROOT\ (an internal kernel path) because it provides aliases for drives.
if (newBuffer.Length > 1 && newBuffer[0] == DirectorySeparatorChar && newBuffer[1] == DirectorySeparatorChar)
{
// Scan past the server name; a valid UNC path must contain a separator (and share name) after it.
var startIndex = 2;
while (startIndex < newBuffer.Length)
{
if (newBuffer[startIndex] == DirectorySeparatorChar)
{
startIndex++;
break;
}
startIndex++;
}
if (startIndex == newBuffer.Length)
throw new ArgumentException(path, "path");
}
return newBuffer.ToString();
}
/// <summary>Normalizes a trailing run of dots and spaces to "." or ".." and rejects illegal combinations.</summary>
private static StringBuilder NormalizePathDotSpaceHandler(string path, int lastSigChar, uint numDots, bool startedWithVolumeSeparator)
{
var newBuffer = new StringBuilder(NativeMethods.MaxPathUnicode);
// Look for ".[space]*" or "..[space]*".
var start = lastSigChar + 1;
if (path[start] != CurrentDirectoryPrefixChar)
throw new ArgumentException(path, "path");
// Only allow "[dot]+[space]*", and normalize the legal ones to "." or "..".
if (numDots >= 2)
{
// Reject "C:...".
if (startedWithVolumeSeparator && numDots > 2)
throw new ArgumentException(path, "path");
if (path[start + 1] == CurrentDirectoryPrefixChar)
{
// Search for a space in the middle of the dots and throw.
for (var i = start + 2; i < start + numDots; i++)
{
if (path[i] != CurrentDirectoryPrefixChar)
throw new ArgumentException(path, "path");
}
numDots = 2;
}
else
{
if (numDots > 1)
throw new ArgumentException(path, "path");
numDots = 1;
}
}
if (numDots == 2)
newBuffer.Append(CurrentDirectoryPrefixChar);
newBuffer.Append(CurrentDirectoryPrefixChar);
return newBuffer;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Runtime.CompilerServices;
using System.Threading.Tasks.Sources;
using System.Threading.Tasks.Tests;
using Microsoft.Xunit.Performance;
using Xunit;
namespace System.Threading.Tasks
{
public class ValueTaskPerfTest
{
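// Each benchmark follows the xunit-performance pattern: iterate over Benchmark.Iterations,
// measure only inside StartMeasurement(), and repeat the awaited operation InnerIterationCount times per iteration.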
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task Await_FromResult()
{
ValueTask<int> vt = new ValueTask<int>(42);
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await vt;
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task Await_FromCompletedTask()
{
ValueTask<int> vt = new ValueTask<int>(Task.FromResult(42));
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await vt;
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task Await_FromCompletedValueTaskSource()
{
ValueTask<int> vt = new ValueTask<int>(ManualResetValueTaskSourceFactory.Completed<int>(42), 0);
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await vt;
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task CreateAndAwait_FromResult()
{
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await new ValueTask<int>((int)i);
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task CreateAndAwait_FromResult_ConfigureAwait()
{
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await new ValueTask<int>((int)i).ConfigureAwait(false);
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task CreateAndAwait_FromCompletedTask()
{
Task<int> t = Task.FromResult(42);
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await new ValueTask<int>(t);
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task CreateAndAwait_FromCompletedTask_ConfigureAwait()
{
Task<int> t = Task.FromResult(42);
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await new ValueTask<int>(t).ConfigureAwait(false);
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task CreateAndAwait_FromCompletedValueTaskSource()
{
IValueTaskSource<int> vts = ManualResetValueTaskSourceFactory.Completed(42);
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await new ValueTask<int>(vts, 0);
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public async Task CreateAndAwait_FromCompletedValueTaskSource_ConfigureAwait()
{
IValueTaskSource<int> vts = ManualResetValueTaskSourceFactory.Completed(42);
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await new ValueTask<int>(vts, 0).ConfigureAwait(false);
}
}
}
}
[Benchmark(InnerIterationCount = 1_000_000), MeasureGCAllocations]
public async Task CreateAndAwait_FromYieldingAsyncMethod()
{
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
await new ValueTask<int>(YieldOnce());
}
}
}
}
[Benchmark(InnerIterationCount = 1_000_000), MeasureGCAllocations]
public async Task CreateAndAwait_FromDelayedTCS()
{
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
var tcs = new TaskCompletionSource<int>();
ValueTask<int> vt = AwaitTcsAsValueTask(tcs);
tcs.SetResult(42);
await vt;
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public void Copy_PassAsArgumentAndReturn_FromResult()
{
ValueTask<int> vt = new ValueTask<int>(42);
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
vt = ReturnValueTask(vt);
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public void Copy_PassAsArgumentAndReturn_FromTask()
{
ValueTask<int> vt = new ValueTask<int>(Task.FromResult(42));
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
vt = ReturnValueTask(vt);
}
}
}
}
[Benchmark(InnerIterationCount = 10_000_000), MeasureGCAllocations]
public void Copy_PassAsArgumentAndReturn_FromValueTaskSource()
{
ValueTask<int> vt = new ValueTask<int>(ManualResetValueTaskSourceFactory.Completed(42), 0);
foreach (BenchmarkIteration iteration in Benchmark.Iterations)
{
long iters = Benchmark.InnerIterationCount;
using (iteration.StartMeasurement())
{
for (long i = 0; i < iters; i++)
{
vt = ReturnValueTask(vt);
}
}
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
private static ValueTask<int> ReturnValueTask(ValueTask<int> vt) => vt;
private async ValueTask<int> AwaitTcsAsValueTask(TaskCompletionSource<int> tcs) => await new ValueTask<int>(tcs.Task).ConfigureAwait(false);
private async Task<int> YieldOnce() { await Task.Yield(); return 42; }
}
}
| |
using System;
using DarkMultiPlayerCommon;
using UnityEngine;
namespace DarkMultiPlayer
{
public class ConnectionWindow
{
public bool display = false;
public bool networkWorkerDisconnected = true;
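//Event flags: the window sets one of these to false to request an action; the handling code is
//expected to set it back to true once the event has been processed.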
public bool connectEventHandled = true;
public bool disconnectEventHandled = true;
public bool addEventHandled = true;
public bool editEventHandled = true;
public bool removeEventHandled = true;
public bool renameEventHandled = true;
public bool addingServer = false;
public bool addingServerSafe = false;
public int selected = -1;
private int selectedSafe = -1;
public string status = "";
public ServerEntry addEntry = null;
public ServerEntry editEntry = null;
private bool initialized;
//Add window
private string serverName = "Local";
private string serverAddress = "127.0.0.1";
private string serverPort = "6702";
//GUI Layout
private Rect windowRect;
private Rect moveRect;
private GUILayoutOption[] labelOptions;
private GUILayoutOption[] layoutOptions;
private GUIStyle windowStyle;
private GUIStyle buttonStyle;
private GUIStyle textAreaStyle;
private GUIStyle statusStyle;
private Vector2 scrollPos;
//const
private const float WINDOW_HEIGHT = 400;
private const float WINDOW_WIDTH = 400;
//version
private string version()
{
if (Common.PROGRAM_VERSION.Length == 40)
{
return "build " + Common.PROGRAM_VERSION.Substring(0, 7);
}
return Common.PROGRAM_VERSION;
}
//Services
private Settings dmpSettings;
private OptionsWindow optionsWindow;
private ServerListDisclaimerWindow serverListDisclaimerWindow;
private ServersWindow serversWindow;
public ConnectionWindow(Settings dmpSettings, OptionsWindow optionsWindow, ServersWindow serversWindow, ServerListDisclaimerWindow serverListDisclaimerWindow)
{
this.dmpSettings = dmpSettings;
this.optionsWindow = optionsWindow;
this.serversWindow = serversWindow;
this.serverListDisclaimerWindow = serverListDisclaimerWindow;
}
public void Update()
{
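//Snapshot values once per frame so the GUI reads a stable copy during the Layout and Repaint passes.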
selectedSafe = selected;
addingServerSafe = addingServer;
display = (HighLogic.LoadedScene == GameScenes.MAINMENU);
}
private void InitGUI()
{
//Setup GUI stuff
windowRect = new Rect(Screen.width * 0.9f - WINDOW_WIDTH, Screen.height / 2f - WINDOW_HEIGHT / 2f, WINDOW_WIDTH, WINDOW_HEIGHT);
moveRect = new Rect(0, 0, 10000, 20);
windowStyle = new GUIStyle(GUI.skin.window);
textAreaStyle = new GUIStyle(GUI.skin.textArea);
buttonStyle = new GUIStyle(GUI.skin.button);
//buttonStyle.fontSize = 10;
statusStyle = new GUIStyle(GUI.skin.label);
//statusStyle.fontSize = 10;
statusStyle.normal.textColor = Color.yellow;
layoutOptions = new GUILayoutOption[4];
layoutOptions[0] = GUILayout.MinWidth(WINDOW_WIDTH);
layoutOptions[1] = GUILayout.MaxWidth(WINDOW_WIDTH);
layoutOptions[2] = GUILayout.MinHeight(WINDOW_HEIGHT);
layoutOptions[3] = GUILayout.MaxHeight(WINDOW_HEIGHT);
labelOptions = new GUILayoutOption[1];
labelOptions[0] = GUILayout.Width(100);
}
public void Draw()
{
if (display)
{
if (!initialized)
{
initialized = true;
InitGUI();
}
windowRect = DMPGuiUtil.PreventOffscreenWindow(GUILayout.Window(6702 + Client.WINDOW_OFFSET, windowRect, DrawContent, "DarkMultiPlayer " + version(), windowStyle, layoutOptions));
}
}
private void DrawContent(int windowID)
{
GUILayout.BeginVertical();
GUI.DragWindow(moveRect);
GUILayout.Space(20);
GUILayout.BeginHorizontal();
GUILayout.Label("Player name:", labelOptions);
string oldPlayerName = dmpSettings.playerName;
dmpSettings.playerName = GUILayout.TextArea(dmpSettings.playerName, 32, textAreaStyle); // Max 32 characters
if (oldPlayerName != dmpSettings.playerName)
{
dmpSettings.playerName = dmpSettings.playerName.Replace("\n", "");
renameEventHandled = false;
}
GUILayout.EndHorizontal();
GUILayout.BeginHorizontal();
//Draw add button
string addMode = selectedSafe == -1 ? "Add" : "Edit";
string buttonAddMode = addMode;
if (addingServer)
{
buttonAddMode = "Cancel";
}
addingServer = GUILayout.Toggle(addingServer, buttonAddMode, buttonStyle);
if (addingServer && !addingServerSafe)
{
if (selected != -1)
{
//Load the existing server settings
serverName = dmpSettings.servers[selected].name;
serverAddress = dmpSettings.servers[selected].address;
serverPort = dmpSettings.servers[selected].port.ToString();
}
}
//Draw connect button
if (networkWorkerDisconnected)
{
GUI.enabled = (selectedSafe != -1);
if (GUILayout.Button("Connect", buttonStyle))
{
connectEventHandled = false;
}
}
else
{
if (GUILayout.Button("Disconnect", buttonStyle))
{
disconnectEventHandled = false;
}
}
//Draw remove button
if (GUILayout.Button("Remove", buttonStyle))
{
if (removeEventHandled == true)
{
removeEventHandled = false;
}
}
GUI.enabled = true;
optionsWindow.display = GUILayout.Toggle(optionsWindow.display, "Options", buttonStyle);
if (dmpSettings.serverlistMode != -1)
{
if (dmpSettings.serverlistMode == 0)
{
if (GUILayout.Button("Servers", buttonStyle))
{
serverListDisclaimerWindow.SpawnDialog();
}
}
else
{
serversWindow.display = GUILayout.Toggle(serversWindow.display, "Servers", buttonStyle);
}
}
GUILayout.EndHorizontal();
if (addingServerSafe)
{
GUILayout.BeginHorizontal();
GUILayout.Label("Name:", labelOptions);
serverName = GUILayout.TextArea(serverName, textAreaStyle);
GUILayout.EndHorizontal();
GUILayout.BeginHorizontal();
GUILayout.Label("Address:", labelOptions);
serverAddress = GUILayout.TextArea(serverAddress, textAreaStyle).Trim();
GUILayout.EndHorizontal();
GUILayout.BeginHorizontal();
GUILayout.Label("Port:", labelOptions);
serverPort = GUILayout.TextArea(serverPort, textAreaStyle).Trim();
GUILayout.EndHorizontal();
if (GUILayout.Button(addMode + " server", buttonStyle))
{
if (addEventHandled == true)
{
if (selected == -1)
{
addEntry = new ServerEntry();
addEntry.name = serverName;
addEntry.address = serverAddress;
addEntry.port = 6702;
Int32.TryParse(serverPort, out addEntry.port);
addEventHandled = false;
}
else
{
editEntry = new ServerEntry();
editEntry.name = serverName;
editEntry.address = serverAddress;
editEntry.port = 6702;
Int32.TryParse(serverPort, out editEntry.port);
editEventHandled = false;
}
}
}
}
GUILayout.Label("Servers:");
if (dmpSettings.servers.Count == 0)
{
GUILayout.Label("(None - Add a server first)");
}
scrollPos = GUILayout.BeginScrollView(scrollPos, GUILayout.Width(WINDOW_WIDTH - 5), GUILayout.Height(WINDOW_HEIGHT - 100));
for (int serverPos = 0; serverPos < dmpSettings.servers.Count; serverPos++)
{
bool thisSelected = GUILayout.Toggle(serverPos == selectedSafe, dmpSettings.servers[serverPos].name, buttonStyle);
if (selected == selectedSafe)
{
if (thisSelected)
{
if (selected != serverPos)
{
selected = serverPos;
addingServer = false;
}
}
else if (selected == serverPos)
{
selected = -1;
addingServer = false;
}
}
}
GUILayout.EndScrollView();
//Draw status message
GUILayout.Label(status, statusStyle);
GUILayout.EndVertical();
}
}
}
| |
//
// PlaylistSource.cs
//
// Authors:
// Aaron Bockover <[email protected]>
// Gabriel Burt <[email protected]>
//
// Copyright (C) 2005-2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections;
using System.Collections.Generic;
using Mono.Unix;
using Hyena;
using Hyena.Data;
using Hyena.Data.Sqlite;
using Hyena.Collections;
using Banshee.Base;
using Banshee.ServiceStack;
using Banshee.Database;
using Banshee.Sources;
using Banshee.Collection;
using Banshee.Collection.Database;
namespace Banshee.Playlist
{
public class PlaylistSource : AbstractPlaylistSource, IUnmapableSource
{
private static HyenaSqliteCommand add_track_range_command;
private static HyenaSqliteCommand add_track_command;
private static HyenaSqliteCommand remove_track_range_command;
private static string add_track_range_from_joined_model_sql;
private static string generic_name = Catalog.GetString ("Playlist");
protected override string SourceTable {
get { return "CorePlaylists"; }
}
protected override string SourcePrimaryKey {
get { return "PlaylistID"; }
}
protected override string TrackJoinTable {
get { return "CorePlaylistEntries"; }
}
protected long MaxViewOrder {
get {
return ServiceManager.DbConnection.Query<long> (
"SELECT MAX(ViewOrder) + 1 FROM CorePlaylistEntries WHERE PlaylistID = ?", DbId);
}
}
static PlaylistSource ()
{
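// Positional parameters for add_track_range_command: PlaylistID, view-order offset, CoreCache ModelID, range offset, range count.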
add_track_range_command = new HyenaSqliteCommand (@"
INSERT INTO CorePlaylistEntries
(EntryID, PlaylistID, TrackID, ViewOrder)
SELECT null, ?, ItemID, OrderId + ?
FROM CoreCache WHERE ModelID = ?
LIMIT ?, ?"
);
add_track_command = new HyenaSqliteCommand (@"
INSERT INTO CorePlaylistEntries
(EntryID, PlaylistID, TrackID, ViewOrder)
VALUES (null, ?, ?, ?)"
);
add_track_range_from_joined_model_sql = @"
INSERT INTO CorePlaylistEntries
(EntryID, PlaylistID, TrackID, ViewOrder)
SELECT null, ?, TrackID, OrderId + ?
FROM CoreCache c INNER JOIN {0} e ON c.ItemID = e.{1}
WHERE ModelID = ?
LIMIT ?, ?";
remove_track_range_command = new HyenaSqliteCommand (@"
DELETE FROM CorePlaylistEntries WHERE PlaylistID = ? AND
EntryID IN (SELECT ItemID FROM CoreCache
WHERE ModelID = ? ORDER BY OrderID LIMIT ?, ?)"
);
}
#region Constructors
public PlaylistSource (string name, PrimarySource parent) : base (generic_name, name, parent)
{
SetProperties ();
}
protected PlaylistSource (string name, long dbid, PrimarySource parent) : this (name, dbid, -1, 0, parent, 0, false)
{
}
protected PlaylistSource (string name, long dbid, int sortColumn, int sortType, PrimarySource parent, int count, bool is_temp)
: base (generic_name, name, dbid, sortColumn, sortType, parent, is_temp)
{
SetProperties ();
SavedCount = count;
}
#endregion
private void SetProperties ()
{
Properties.SetString ("Icon.Name", "source-playlist");
Properties.SetString ("RemoveTracksActionLabel", Catalog.GetString ("Remove From Playlist"));
Properties.SetString ("UnmapSourceActionLabel", Catalog.GetString ("Delete Playlist"));
}
#region Source Overrides
public override bool AcceptsInputFromSource (Source source)
{
return base.AcceptsInputFromSource (source) && (
source == Parent || (source.Parent == Parent || Parent == null)
// This is commented out because we don't support (yet?) DnD from Play Queue to a playlist
//(source.Parent == Parent || Parent == null || (source.Parent == null && !(source is PrimarySource)))
);
}
public override SourceMergeType SupportedMergeTypes {
get { return SourceMergeType.All; }
}
#endregion
#region AbstractPlaylist overrides
protected override void AfterInitialized ()
{
base.AfterInitialized ();
if (PrimarySource != null) {
PrimarySource.TracksChanged += HandleTracksChanged;
PrimarySource.TracksDeleted += HandleTracksDeleted;
}
TrackModel.CanReorder = true;
}
protected override void Update ()
{
ServiceManager.DbConnection.Execute (new HyenaSqliteCommand (
String.Format (
@"UPDATE {0}
SET Name = ?,
SortColumn = ?,
SortType = ?,
CachedCount = ?,
IsTemporary = ?
WHERE PlaylistID = ?",
SourceTable
), Name, -1, 0, Count, IsTemporary, dbid
));
}
protected override void Create ()
{
DbId = ServiceManager.DbConnection.Execute (new HyenaSqliteCommand (
@"INSERT INTO CorePlaylists (PlaylistID, Name, SortColumn, SortType, PrimarySourceID, IsTemporary)
VALUES (NULL, ?, ?, ?, ?, ?)",
Name, -1, 1, PrimarySourceId, IsTemporary //SortColumn, SortType
));
}
#endregion
#region DatabaseSource overrides
// We can add tracks only if our parent can
public override bool CanAddTracks {
get {
DatabaseSource ds = Parent as DatabaseSource;
return ds != null ? ds.CanAddTracks : base.CanAddTracks;
}
}
// We can remove tracks only if our parent can
public override bool CanRemoveTracks {
get {
return (Parent is PrimarySource)
? !(Parent as PrimarySource).PlaylistsReadOnly
: true;
}
}
#endregion
#region IUnmapableSource Implementation
public virtual bool Unmap ()
{
if (DbId != null) {
ServiceManager.DbConnection.Execute (new HyenaSqliteCommand (@"
BEGIN TRANSACTION;
DELETE FROM CorePlaylists WHERE PlaylistID = ?;
DELETE FROM CorePlaylistEntries WHERE PlaylistID = ?;
COMMIT TRANSACTION",
DbId, DbId
));
}
ThreadAssist.ProxyToMain (Remove);
return true;
}
#endregion
protected void AddTrack (long track_id)
{
ServiceManager.DbConnection.Execute (add_track_command, DbId, track_id, MaxViewOrder);
OnTracksAdded ();
}
protected override void AddTrack (DatabaseTrackInfo track)
{
AddTrack (track.TrackId);
}
public override bool AddSelectedTracks (Source source, Selection selection)
{
if (Parent == null || source == Parent || source.Parent == Parent) {
return base.AddSelectedTracks (source, selection);
} else {
// Adding from a different primary source, so add to our primary source first
//PrimarySource primary = Parent as PrimarySource;
//primary.AddSelectedTracks (model);
// then add to us
//Log.Information ("Note: Feature Not Implemented", String.Format ("In this alpha release, you can only add tracks to {0} from {1} or its playlists.", Name, Parent.Name), true);
}
return false;
}
public virtual void ReorderSelectedTracks (int drop_row)
{
if (TrackModel.Selection.Count == 0 || TrackModel.Selection.AllSelected) {
return;
}
TrackInfo track = TrackModel[drop_row];
long order = track == null
? ServiceManager.DbConnection.Query<long> ("SELECT MAX(ViewOrder) + 1 FROM CorePlaylistEntries WHERE PlaylistID = ?", DbId)
: ServiceManager.DbConnection.Query<long> ("SELECT ViewOrder FROM CorePlaylistEntries WHERE PlaylistID = ? AND EntryID = ?", DbId, Convert.ToInt64 (track.CacheEntryId));
// Make room for our new items
if (track != null) {
ServiceManager.DbConnection.Execute ("UPDATE CorePlaylistEntries SET ViewOrder = ViewOrder + ? WHERE PlaylistID = ? AND ViewOrder >= ?",
TrackModel.Selection.Count, DbId, order
);
}
HyenaSqliteCommand update_command = new HyenaSqliteCommand (String.Format ("UPDATE CorePlaylistEntries SET ViewOrder = ? WHERE PlaylistID = {0} AND EntryID = ?", DbId));
HyenaSqliteCommand select_command = new HyenaSqliteCommand (String.Format ("SELECT ItemID FROM CoreCache WHERE ModelID = {0} LIMIT ?, ?", DatabaseTrackModel.CacheId));
// Reorder the selected items
ServiceManager.DbConnection.BeginTransaction ();
foreach (RangeCollection.Range range in TrackModel.Selection.Ranges) {
foreach (long entry_id in ServiceManager.DbConnection.QueryEnumerable<long> (select_command, range.Start, range.Count)) {
ServiceManager.DbConnection.Execute (update_command, order++, entry_id);
}
}
ServiceManager.DbConnection.CommitTransaction ();
Reload ();
}
DatabaseTrackListModel last_add_range_from_model;
HyenaSqliteCommand last_add_range_command = null;
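// AddTrackRange caches the command built for the last joined model so that repeated adds from the
// same model do not rebuild the SQL on every range.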
protected override void AddTrackRange (DatabaseTrackListModel from, RangeCollection.Range range)
{
last_add_range_command = (!from.CachesJoinTableEntries)
? add_track_range_command
: from == last_add_range_from_model
? last_add_range_command
: new HyenaSqliteCommand (String.Format (add_track_range_from_joined_model_sql, from.JoinTable, from.JoinPrimaryKey));
long first_order_id = ServiceManager.DbConnection.Query<long> ("SELECT OrderID FROM CoreCache WHERE ModelID = ? LIMIT 1 OFFSET ?", from.CacheId, range.Start);
ServiceManager.DbConnection.Execute (last_add_range_command, DbId, MaxViewOrder - first_order_id, from.CacheId, range.Start, range.Count);
last_add_range_from_model = from;
}
protected override void RemoveTrackRange (DatabaseTrackListModel from, RangeCollection.Range range)
{
ServiceManager.DbConnection.Execute (remove_track_range_command,
DbId, from.CacheId, range.Start, range.Count);
}
protected override void HandleTracksChanged (Source sender, TrackEventArgs args)
{
if (args.When > last_updated) {
last_updated = args.When;
// Playlists do not need to reload if only certain columns are changed
if (NeedsReloadWhenFieldsChanged (args.ChangedFields)) {
// TODO Optimization: playlists only need to reload if one of their tracks was updated
//if (ServiceManager.DbConnection.Query<int> (count_updated_command, last_updated) > 0) {
Reload ();
//}
} else {
InvalidateCaches ();
}
}
}
protected override void HandleTracksDeleted (Source sender, TrackEventArgs args)
{
if (args.When > last_removed) {
last_removed = args.When;
Reload ();
/*if (ServiceManager.DbConnection.Query<int> (count_removed_command, last_removed) > 0) {
//ServiceManager.DbConnection.Execute ("DELETE FROM CoreCache WHERE ModelID = ? AND ItemID IN (SELECT EntryID FROM CorePlaylistEntries WHERE PlaylistID = ? AND TrackID IN (TrackID FROM CoreRemovedTracks))");
ServiceManager.DbConnection.Execute ("DELETE FROM CorePlaylistEntries WHERE TrackID IN (SELECT TrackID FROM CoreRemovedTracks)");
//track_model.UpdateAggregates ();
//OnUpdated ();
}*/
}
}
public static IEnumerable<PlaylistSource> LoadAll (PrimarySource parent)
{
ClearTemporary ();
using (HyenaDataReader reader = new HyenaDataReader (ServiceManager.DbConnection.Query (
@"SELECT PlaylistID, Name, SortColumn, SortType, PrimarySourceID, CachedCount, IsTemporary FROM CorePlaylists
WHERE Special = 0 AND PrimarySourceID = ?", parent.DbId))) {
while (reader.Read ()) {
yield return new PlaylistSource (
reader.Get<string> (1), reader.Get<long> (0),
reader.Get<int> (2), reader.Get<int> (3), parent,
reader.Get<int> (5), reader.Get<bool> (6)
);
}
}
}
private static void ClearTemporary ()
{
ServiceManager.DbConnection.BeginTransaction ();
ServiceManager.DbConnection.Execute (@"
DELETE FROM CorePlaylistEntries WHERE PlaylistID IN (SELECT PlaylistID FROM CorePlaylists WHERE IsTemporary = 1);
DELETE FROM CorePlaylists WHERE IsTemporary = 1;"
);
ServiceManager.DbConnection.CommitTransaction ();
}
public static void ClearTemporary (PrimarySource parent)
{
if (parent != null) {
ServiceManager.DbConnection.BeginTransaction ();
ServiceManager.DbConnection.Execute (@"
DELETE FROM CorePlaylistEntries WHERE PlaylistID IN (SELECT PlaylistID FROM CorePlaylists WHERE PrimarySourceID = ? AND IsTemporary = 1);
DELETE FROM CorePlaylists WHERE PrimarySourceID = ? AND IsTemporary = 1;", parent.DbId, parent.DbId
);
ServiceManager.DbConnection.CommitTransaction ();
}
}
private static long GetPlaylistId (string name)
{
return ServiceManager.DbConnection.Query<long> (
"SELECT PlaylistID FROM Playlists WHERE Name = ? LIMIT 1", name
);
}
private static bool PlaylistExists (string name)
{
return GetPlaylistId (name) > 0;
}
public static string CreateUniqueName ()
{
return NamingUtil.PostfixDuplicate (Catalog.GetString ("New Playlist"), PlaylistExists);
}
public static string CreateUniqueName (IEnumerable tracks)
{
return NamingUtil.PostfixDuplicate (NamingUtil.GenerateTrackCollectionName (
tracks, Catalog.GetString ("New Playlist")), PlaylistExists);
}
}
}
| |
// ***********************************************************************
// Copyright (c) 2012-2014 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Reflection;
using System.Threading;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal;
using NUnit.Framework.Internal.Execution;
using System.Collections.Generic;
using System.IO;
using System.Diagnostics;
using System.Security;
#if NET20 || NET35 || NET40 || NET45
using System.Windows.Forms;
#endif
namespace NUnit.Framework.Api
{
/// <summary>
/// Implementation of ITestAssemblyRunner
/// </summary>
public class NUnitTestAssemblyRunner : ITestAssemblyRunner
{
private static readonly Logger log = InternalTrace.GetLogger("DefaultTestAssemblyRunner");
private readonly ITestAssemblyBuilder _builder;
private readonly ManualResetEventSlim _runComplete = new ManualResetEventSlim();
// Saved Console.Out and Console.Error
private TextWriter _savedOut;
private TextWriter _savedErr;
#if PARALLEL
// Event Pump
private EventPump _pump;
#endif
#region Constructors
/// <summary>
/// Initializes a new instance of the <see cref="NUnitTestAssemblyRunner"/> class.
/// </summary>
/// <param name="builder">The builder.</param>
public NUnitTestAssemblyRunner(ITestAssemblyBuilder builder)
{
_builder = builder;
}
#endregion
#region Properties
#if PARALLEL
/// <summary>
/// Gets the default level of parallel execution (worker threads)
/// </summary>
public static int DefaultLevelOfParallelism
{
get { return Math.Max(Environment.ProcessorCount, 2); }
}
#endif
/// <summary>
/// The tree of tests that was loaded by the builder
/// </summary>
public ITest LoadedTest { get; private set; }
/// <summary>
/// The test result, if a run has completed
/// </summary>
public ITestResult Result
{
get { return TopLevelWorkItem == null ? null : TopLevelWorkItem.Result; }
}
/// <summary>
/// Indicates whether a test is loaded
/// </summary>
public bool IsTestLoaded
{
get { return LoadedTest != null; }
}
/// <summary>
/// Indicates whether a test is running
/// </summary>
public bool IsTestRunning
{
get { return TopLevelWorkItem != null && TopLevelWorkItem.State == WorkItemState.Running; }
}
/// <summary>
/// Indicates whether a test run is complete
/// </summary>
public bool IsTestComplete
{
get { return TopLevelWorkItem != null && TopLevelWorkItem.State == WorkItemState.Complete; }
}
/// <summary>
/// Our settings, specified when loading the assembly
/// </summary>
private IDictionary<string, object> Settings { get; set; }
/// <summary>
/// The top level WorkItem created for the assembly as a whole
/// </summary>
private WorkItem TopLevelWorkItem { get; set; }
/// <summary>
/// The TestExecutionContext for the top level WorkItem
/// </summary>
private TestExecutionContext Context { get; set; }
#endregion
#region Public Methods
/// <summary>
/// Loads the tests found in an Assembly
/// </summary>
/// <param name="assemblyNameOrPath">File name or path of the assembly to load</param>
/// <param name="settings">Dictionary of option settings for loading the assembly</param>
/// <returns>An <see cref="ITest"/> representing the loaded tests</returns>
public ITest Load(string assemblyNameOrPath, IDictionary<string, object> settings)
{
Settings = settings;
if (settings.ContainsKey(FrameworkPackageSettings.RandomSeed))
Randomizer.InitialSeed = (int)settings[FrameworkPackageSettings.RandomSeed];
return LoadedTest = _builder.Build(assemblyNameOrPath, settings);
}
/// <summary>
/// Loads the tests found in an Assembly
/// </summary>
/// <param name="assembly">The assembly to load</param>
/// <param name="settings">Dictionary of option settings for loading the assembly</param>
/// <returns>An <see cref="ITest"/> representing the loaded tests</returns>
public ITest Load(Assembly assembly, IDictionary<string, object> settings)
{
Settings = settings;
if (settings.ContainsKey(FrameworkPackageSettings.RandomSeed))
Randomizer.InitialSeed = (int)settings[FrameworkPackageSettings.RandomSeed];
return LoadedTest = _builder.Build(assembly, settings);
}
/// <summary>
/// Count Test Cases using a filter
/// </summary>
/// <param name="filter">The filter to apply</param>
/// <returns>The number of test cases found</returns>
public int CountTestCases(ITestFilter filter)
{
if (LoadedTest == null)
throw new InvalidOperationException("The CountTestCases method was called but no test has been loaded");
return CountTestCases(LoadedTest, filter);
}
/// <summary>
/// Explore the test cases using a filter
/// </summary>
/// <param name="filter">The filter to apply</param>
/// <returns>Test Assembly with test cases that matches the filter</returns>
public ITest ExploreTests(ITestFilter filter)
{
if (LoadedTest == null)
throw new InvalidOperationException("The ExploreTests method was called but no test has been loaded");
if (filter == TestFilter.Empty)
return LoadedTest;
return new TestAssembly(LoadedTest as TestAssembly, filter);
}
/// <summary>
/// Run selected tests and return a test result. The test is run synchronously,
/// and the listener interface is notified as it progresses.
/// </summary>
/// <param name="listener">Interface to receive EventListener notifications.</param>
/// <param name="filter">A test filter used to select tests to be run</param>
/// <returns>The test result of the completed run</returns>
public ITestResult Run(ITestListener listener, ITestFilter filter)
{
RunAsync(listener, filter);
WaitForCompletion(Timeout.Infinite);
return Result;
}
/// <summary>
/// Run selected tests asynchronously, notifying the listener interface as it progresses.
/// </summary>
/// <param name="listener">Interface to receive EventListener notifications.</param>
/// <param name="filter">A test filter used to select tests to be run</param>
/// <remarks>
/// RunAsync is a template method, calling various abstract and
/// virtual methods to be overridden by derived classes.
/// </remarks>
public void RunAsync(ITestListener listener, ITestFilter filter)
{
log.Info("Running tests");
if (LoadedTest == null)
throw new InvalidOperationException("The Run method was called but no test has been loaded");
_runComplete.Reset();
CreateTestExecutionContext(listener);
TopLevelWorkItem = WorkItemBuilder.CreateWorkItem(LoadedTest, filter, true);
TopLevelWorkItem.InitializeContext(Context);
TopLevelWorkItem.Completed += OnRunCompleted;
StartRun(listener);
}
/// <summary>
/// Wait for the ongoing run to complete.
/// </summary>
/// <param name="timeout">Time to wait in milliseconds</param>
/// <returns>True if the run completed, otherwise false</returns>
public bool WaitForCompletion(int timeout)
{
return _runComplete.Wait(timeout);
}
/// <summary>
/// Signal any test run that is in process to stop. Return without error if no test is running.
/// </summary>
/// <param name="force">If true, kill any tests that are currently running</param>
public void StopRun(bool force)
{
if (IsTestRunning)
{
Context.ExecutionStatus = force
? TestExecutionStatus.AbortRequested
: TestExecutionStatus.StopRequested;
Context.Dispatcher.CancelRun(force);
}
}
#endregion
#region Helper Methods
/// <summary>
/// Initiate the test run.
/// </summary>
private void StartRun(ITestListener listener)
{
// Save Console.Out and Error for later restoration
_savedOut = Console.Out;
_savedErr = Console.Error;
Console.SetOut(new TextCapture(Console.Out));
Console.SetError(new EventListenerTextWriter("Error", Console.Error));
#if PARALLEL
// Queue and pump events, unless settings have SynchronousEvents == false
if (!Settings.ContainsKey(FrameworkPackageSettings.SynchronousEvents) || !(bool)Settings[FrameworkPackageSettings.SynchronousEvents])
{
QueuingEventListener queue = new QueuingEventListener();
Context.Listener = queue;
_pump = new EventPump(listener, queue.Events);
_pump.Start();
}
#endif
if (!System.Diagnostics.Debugger.IsAttached &&
Settings.ContainsKey(FrameworkPackageSettings.DebugTests) &&
(bool)Settings[FrameworkPackageSettings.DebugTests])
{
try
{
System.Diagnostics.Debugger.Launch();
}
catch (SecurityException)
{
TopLevelWorkItem.MarkNotRunnable("System.Security.Permissions.UIPermission is not set to start the debugger.");
return;
}
//System.Diagnostics.Debugger.Launch() not implemented on mono
catch (NotImplementedException)
{
TopLevelWorkItem.MarkNotRunnable("Debugger unavailable on this platform.");
return;
}
}
#if NET20 || NET35 || NET40 || NET45
if (Settings.ContainsKey(FrameworkPackageSettings.PauseBeforeRun) &&
(bool)Settings[FrameworkPackageSettings.PauseBeforeRun])
PauseBeforeRun();
#endif
Context.Dispatcher.Start(TopLevelWorkItem);
}
/// <summary>
/// Create the initial TestExecutionContext used to run tests
/// </summary>
/// <param name="listener">The ITestListener specified in the RunAsync call</param>
private void CreateTestExecutionContext(ITestListener listener)
{
Context = new TestExecutionContext();
// Apply package settings to the context
if (Settings.ContainsKey(FrameworkPackageSettings.DefaultTimeout))
Context.TestCaseTimeout = (int)Settings[FrameworkPackageSettings.DefaultTimeout];
if (Settings.ContainsKey(FrameworkPackageSettings.StopOnError))
Context.StopOnError = (bool)Settings[FrameworkPackageSettings.StopOnError];
// Apply attributes to the context
// Set the listener - overriding runners may replace this
Context.Listener = listener;
#if !PARALLEL
Context.Dispatcher = new MainThreadWorkItemDispatcher();
#else
int levelOfParallelism = GetLevelOfParallelism();
if (Settings.ContainsKey(FrameworkPackageSettings.RunOnMainThread) &&
(bool)Settings[FrameworkPackageSettings.RunOnMainThread])
Context.Dispatcher = new MainThreadWorkItemDispatcher();
else if (levelOfParallelism > 0)
Context.Dispatcher = new ParallelWorkItemDispatcher(levelOfParallelism);
else
Context.Dispatcher = new SimpleWorkItemDispatcher();
#endif
}
/// <summary>
/// Handle the Completed event for the top level work item
/// </summary>
private void OnRunCompleted(object sender, EventArgs e)
{
#if PARALLEL
if (_pump != null)
_pump.Dispose();
#endif
Console.SetOut(_savedOut);
Console.SetError(_savedErr);
_runComplete.Set();
}
private int CountTestCases(ITest test, ITestFilter filter)
{
if (!test.IsSuite)
return 1;
int count = 0;
foreach (ITest child in test.Tests)
if (filter.Pass(child))
count += CountTestCases(child, filter);
return count;
}
#if PARALLEL
private int GetLevelOfParallelism()
{
return Settings.ContainsKey(FrameworkPackageSettings.NumberOfTestWorkers)
? (int)Settings[FrameworkPackageSettings.NumberOfTestWorkers]
: (LoadedTest.Properties.ContainsKey(PropertyNames.LevelOfParallelism)
? (int)LoadedTest.Properties.Get(PropertyNames.LevelOfParallelism)
: NUnitTestAssemblyRunner.DefaultLevelOfParallelism);
}
#endif
#if NET20 || NET35 || NET40 || NET45
// This method invokes members on the 'System.Diagnostics.Process' class and must satisfy the link demand of
// the full-trust 'PermissionSetAttribute' on this class. Callers of this method have no influence on how the
// Process class is used, so we can safely satisfy the link demand with a 'SecuritySafeCriticalAttribute' rather
// than a 'SecurityCriticalAttribute' and allow use by security transparent callers.
[SecuritySafeCritical]
private static void PauseBeforeRun()
{
var process = Process.GetCurrentProcess();
string attachMessage = string.Format("Attach debugger to Process {0}.exe with Id {1} if desired.", process.ProcessName, process.Id);
MessageBox.Show(attachMessage, process.ProcessName, MessageBoxButtons.OK, MessageBoxIcon.Information);
}
#endif
#endregion
}
}
| |
using System;
using System.Data;
using System.Web;
namespace Codentia.Common.Membership
{
/// <summary>
/// This class represents an Address within the system
/// </summary>
public class Address : IAddress
{
private int _addressId;
private Country _country;
private string _title = string.Empty;
private string _firstName = string.Empty;
private string _lastName = string.Empty;
private string _houseName;
private string _street;
private string _town;
private string _city;
private string _county;
private string _postcode;
private Guid _cookie;
private bool _isCountryOnlyAddress;
private int _emailAddressId;
/// <summary>
/// Initializes a new instance of the <see cref="Address"/> class.
/// </summary>
/// <param name="addressId">The address id.</param>
public Address(int addressId)
{
PopulateById(Guid.Empty, addressId);
}
/// <summary>
/// Initializes a new instance of the <see cref="Address"/> class.
/// </summary>
/// <param name="dr">The dr.</param>
public Address(DataRow dr)
{
PopulateByDataRow(dr);
}
/// <summary>
/// Initializes a new instance of the <see cref="Address"/> class.
/// </summary>
/// <param name="cookie">The cookie.</param>
public Address(Guid cookie)
{
PopulateViaCookie(cookie);
}
/// <summary>
/// Initializes a new instance of the <see cref="Address"/> class.
/// </summary>
/// <param name="context">The context.</param>
/// <param name="addressCookieName">Name of the address cookie.</param>
/// <param name="emailCookieName">Name of the email cookie.</param>
public Address(HttpContext context, string addressCookieName, string emailCookieName)
{
Contact cn = new Contact(context, emailCookieName);
Guid addressGuid = Guid.Empty;
HttpCookie addressCookie = context.Request.Cookies[addressCookieName];
if (addressCookie != null)
{
if (!string.IsNullOrEmpty(addressCookie.Value))
{
addressGuid = new Guid(addressCookie.Value);
}
}
if (addressGuid == Guid.Empty)
{
throw new ArgumentException("A valid address cannot be found, empty or missing Guid");
}
if (!AddressData.AddressExists(addressGuid, cn.ConfirmGuid))
{
throw new ArgumentException(string.Format("Cookie mismatch: address record for addressGuid={0} does not match email address record for emailAddressGuid={1}", addressGuid.ToString(), cn.ConfirmGuid.ToString()));
}
PopulateViaCookie(addressGuid);
}
private Address(Guid txnId, int addressId)
{
PopulateById(txnId, addressId);
}
/// <summary>
/// Gets the address id.
/// </summary>
public int AddressId
{
get
{
return _addressId;
}
}
/// <summary>
/// Gets the contact.
/// </summary>
public Contact Contact
{
get
{
return new Contact(_emailAddressId);
}
}
/// <summary>
/// Gets or sets the country.
/// </summary>
/// <value>
/// The country.
/// </value>
public Country Country
{
get
{
return _country;
}
set
{
if (value == null || _country == null || _country.CountryId != value.CountryId)
{
_country = value;
Save();
}
}
}
/// <summary>
/// Gets the country.
/// </summary>
string IAddress.Country
{
get
{
return _country == null ? string.Empty : _country.DisplayText;
}
}
/// <summary>
/// Gets a value indicating whether this instance is country only address.
/// </summary>
/// <value>
/// <c>true</c> if this instance is country only address; otherwise, <c>false</c>.
/// </value>
public bool IsCountryOnlyAddress
{
get
{
return _isCountryOnlyAddress;
}
}
/// <summary>
/// Gets or sets the title.
/// </summary>
/// <value>
/// The title.
/// </value>
public string Title
{
get
{
return _title;
}
set
{
if (_title != value)
{
_title = value;
Save();
}
}
}
/// <summary>
/// Gets or sets the first name.
/// </summary>
public string FirstName
{
get
{
return _firstName;
}
set
{
if (_firstName != value)
{
_firstName = value;
Save();
}
}
}
/// <summary>
/// Gets or sets the last name.
/// </summary>
public string LastName
{
get
{
return _lastName;
}
set
{
if (_lastName != value)
{
_lastName = value;
Save();
}
}
}
/// <summary>
/// Gets or sets the HouseName
/// </summary>
public string HouseName
{
get
{
return _houseName;
}
set
{
if (_houseName != value)
{
_houseName = value;
Save();
}
}
}
/// <summary>
/// Gets or sets the Street
/// </summary>
public string Street
{
get
{
return _street;
}
set
{
if (_street != value)
{
_street = value;
Save();
}
}
}
/// <summary>
/// Gets or sets Town
/// </summary>
public string Town
{
get
{
if (_town == null)
{
_town = string.Empty;
}
return _town;
}
set
{
if (_town != value)
{
_town = value;
Save();
}
}
}
/// <summary>
/// Gets or sets the City
/// </summary>
public string City
{
get
{
return _city;
}
set
{
if (_city != value)
{
_city = value;
Save();
}
}
}
/// <summary>
/// Gets or sets the County
/// </summary>
public string County
{
get
{
return _county;
}
set
{
if (_county != value)
{
_county = value;
Save();
}
}
}
/// <summary>
/// Gets or sets the Postcode
/// </summary>
public string Postcode
{
get
{
return _postcode;
}
set
{
if (_postcode != value)
{
_postcode = value;
Save();
}
}
}
/// <summary>
/// Gets the cookie.
/// </summary>
public Guid Cookie
{
get
{
return _cookie;
}
}
/// <summary>
/// Creates the address.
/// </summary>
/// <param name="title">The title.</param>
/// <param name="firstName">The first name.</param>
/// <param name="lastName">The last name.</param>
/// <param name="houseName">Name of the house.</param>
/// <param name="street">The street.</param>
/// <param name="town">The town.</param>
/// <param name="city">The city.</param>
/// <param name="county">The county.</param>
/// <param name="postCode">The post code.</param>
/// <param name="country">The country.</param>
/// <param name="emailAddress">The email address.</param>
/// <returns>
/// Address object
/// </returns>
public static Address CreateAddress(string title, string firstName, string lastName, string houseName, string street, string town, string city, string county, string postCode, Country country, string emailAddress)
{
return CreateAddress(Guid.Empty, title, firstName, lastName, houseName, street, town, city, county, postCode, country, emailAddress);
}
/// <summary>
/// Creates the address.
/// </summary>
/// <param name="txnId">The TXN id.</param>
/// <param name="title">The title.</param>
/// <param name="firstName">The first name.</param>
/// <param name="lastName">The last name.</param>
/// <param name="houseName">Name of the house.</param>
/// <param name="street">The street.</param>
/// <param name="town">The town.</param>
/// <param name="city">The city.</param>
/// <param name="county">The county.</param>
/// <param name="postCode">The post code.</param>
/// <param name="country">The country.</param>
/// <param name="emailAddress">The email address.</param>
/// <returns>
/// Address object
/// </returns>
public static Address CreateAddress(Guid txnId, string title, string firstName, string lastName, string houseName, string street, string town, string city, string county, string postCode, Country country, string emailAddress)
{
if (country == null)
{
throw new ArgumentException("country: was not specified");
}
Contact contact = null;
if (ContactData.EmailAddressExists(emailAddress))
{
contact = new Contact(emailAddress);
}
else
{
contact = Contact.CreateContact(emailAddress);
}
int addressId = AddressData.CreateAddress(txnId, title, firstName, lastName, houseName, street, town, city, county, postCode, country.CountryId, contact.EmailAddressId);
return new Address(txnId, addressId);
}
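// Usage sketch (illustrative; the Country instance and email address below are assumptions,
// not values defined in this file):
//   Country country = new Country(countryId);
//   Address address = Address.CreateAddress(
//       "Mr", "John", "Smith", "1 High House", "High Street", "Smalltown",
//       "Bigcity", "Somecounty", "AB1 2CD", country, "john.smith@example.com");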
/// <summary>
/// Create a Country Only address
/// </summary>
/// <param name="txnId">transaction Id</param>
/// <param name="countryId">Database Id of Country</param>
/// <param name="emailAddressId">All addresses are associated to an emailAddressId</param>
/// <returns>The Address</returns>
public static Address CreateAddress(Guid txnId, int countryId, int emailAddressId)
{
int addressId = AddressData.CreateCountryOnlyAddress(txnId, countryId, emailAddressId);
return new Address(txnId, addressId);
}
/// <summary>
/// Return a list of countries in a format suitable for front-end binding and/or use
/// </summary>
/// <returns>LookupPair array</returns>
public static LookupPair[] GetCountryList()
{
DataTable countries = CountryData.GetCountries();
LookupPair[] results = new LookupPair[countries.Rows.Count];
for (int i = 0; i < results.Length; i++)
{
results[i] = new LookupPair(Convert.ToString(countries.Rows[i]["CountryId"]), Convert.ToString(countries.Rows[i]["DisplayText"]));
}
return results;
}
/// <summary>
/// ConcatenateAddress - provide address in delimited format
/// </summary>
/// <param name="delimiter">The delimiter.</param>
/// <param name="isPostCodeRequired">if set to <c>true</c> [is post code required].</param>
/// <returns>
/// string of concatenated address
/// </returns>
public string ConcatenateAddress(string delimiter, bool isPostCodeRequired)
{
string returnVal = string.Empty;
string townString = string.Empty;
if (!string.IsNullOrEmpty(_town))
{
townString = string.Format("{0}{1}", _town, delimiter);
}
if (_isCountryOnlyAddress)
{
returnVal = _country.DisplayText;
}
else
{
string name = string.Format("{0}{1}{2}{3}{4}", _title, string.IsNullOrEmpty(_title) ? string.Empty : " ", _firstName, string.IsNullOrEmpty(_firstName) ? string.Empty : " ", _lastName);
if (!string.IsNullOrEmpty(name))
{
returnVal = string.Format("{0}{1}{2}{3}{4}{5}{6}{7}{8}{9}", name, delimiter, _houseName, delimiter, _street, delimiter, townString, _city, delimiter, _county);
}
else
{
returnVal = string.Format("{0}{1}{2}{3}{4}{5}{6}{7}", _houseName, delimiter, _street, delimiter, townString, _city, delimiter, _county);
}
if (isPostCodeRequired)
{
returnVal = string.Format("{0}{1}{2}", returnVal, delimiter, _postcode);
}
returnVal = string.Format("{0}{1}{2}", returnVal, delimiter, _country.DisplayText);
}
return returnVal;
}
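// Example (illustrative): with delimiter ", " and isPostCodeRequired = true, a fully populated
// address concatenates roughly as
//   "Mr John Smith, 1 High House, High Street, Smalltown, Bigcity, Somecounty, AB1 2CD, United Kingdom"
// whereas a country-only address returns just the country's display text.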
/// <summary>
/// Updates from another address.
/// </summary>
/// <param name="add">The add.</param>
public void UpdateFromAnotherAddress(Address add)
{
_title = add.Title;
_firstName = add.FirstName;
_lastName = add.LastName;
_houseName = add.HouseName;
_street = add.Street;
_town = add.Town;
_city = add.City;
_county = add.County;
_postcode = add.Postcode;
_country = add.Country;
Save();
}
private void PopulateViaCookie(Guid cookie)
{
DataTable dt = AddressData.GetAddressByCookie(cookie);
if (dt != null)
{
if (dt.Rows.Count > 0)
{
int addressId = Convert.ToInt32(dt.Rows[0]["Addressid"]);
PopulateById(Guid.Empty, addressId);
}
}
}
/// <summary>
/// Saves this instance.
/// </summary>
private void Save()
{
if (_isCountryOnlyAddress)
{
AddressData.UpdateCountryOnlyAddress(_addressId, _country.CountryId);
}
else
{
AddressData.UpdateAddress(_addressId, _title, _firstName, _lastName, _houseName, _street, _town, _city, _county, _postcode, _country.CountryId);
}
}
/// <summary>
/// PopulateById
/// Populate the address object using the addressId
/// </summary>
/// <param name="txnId">transaction Id</param>
/// <param name="addressId">Database Id of Address</param>
private void PopulateById(Guid txnId, int addressId)
{
DataTable dt = AddressData.GetAddressById(txnId, addressId);
PopulateByDataRow(dt.Rows[0]);
}
private void PopulateByDataRow(DataRow dr)
{
// addressId is read separately so that an application can implement a different table for addresses other than Address
// update optional members
_title = Convert.ToString(dr["Title"]);
_firstName = Convert.ToString(dr["FirstName"]);
_lastName = Convert.ToString(dr["LastName"]);
_town = Convert.ToString(dr["Town"]);
// update non optional members
_country = new Country(Convert.ToInt32(dr["CountryId"]));
_houseName = Convert.ToString(dr["HouseName"]);
_street = Convert.ToString(dr["Street"]);
_city = Convert.ToString(dr["City"]);
_county = Convert.ToString(dr["County"]);
_postcode = Convert.ToString(dr["PostCode"]);
_cookie = new Guid(Convert.ToString(dr["Cookie"]));
_addressId = Convert.ToInt32(dr["AddressId"]);
_emailAddressId = Convert.ToInt32(dr["EmailAddressId"]);
if (_firstName == string.Empty && _houseName == string.Empty && _street == string.Empty)
{
_isCountryOnlyAddress = true;
}
}
}
}
| |
namespace GrantApp
{
partial class AddGrant
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.lblGrantor = new System.Windows.Forms.Label();
this.btnCancel = new System.Windows.Forms.Button();
this.grantSubmitButton = new System.Windows.Forms.Button();
this.lblName = new System.Windows.Forms.Label();
this.grantNameText = new System.Windows.Forms.TextBox();
this.addGrantorButton = new System.Windows.Forms.Button();
this.label5 = new System.Windows.Forms.Label();
this.requestedAmountText = new System.Windows.Forms.TextBox();
this.label6 = new System.Windows.Forms.Label();
this.actualAmountText = new System.Windows.Forms.TextBox();
this.label1 = new System.Windows.Forms.Label();
this.grantDescText = new System.Windows.Forms.TextBox();
this.label2 = new System.Windows.Forms.Label();
this.submitDatePicker = new System.Windows.Forms.DateTimePicker();
this.label3 = new System.Windows.Forms.Label();
this.label4 = new System.Windows.Forms.Label();
this.label7 = new System.Windows.Forms.Label();
this.label8 = new System.Windows.Forms.Label();
this.dueDatePicker = new System.Windows.Forms.DateTimePicker();
this.turnaroundPicker = new System.Windows.Forms.DateTimePicker();
this.startDatePicker = new System.Windows.Forms.DateTimePicker();
this.paymentDatePicker = new System.Windows.Forms.DateTimePicker();
this.label9 = new System.Windows.Forms.Label();
this.label10 = new System.Windows.Forms.Label();
this.label14 = new System.Windows.Forms.Label();
this.label13 = new System.Windows.Forms.Label();
this.label12 = new System.Windows.Forms.Label();
this.grantorUsernameText = new System.Windows.Forms.TextBox();
this.grantorPasswordText = new System.Windows.Forms.TextBox();
this.label15 = new System.Windows.Forms.Label();
this.programsList = new System.Windows.Forms.ListBox();
this.label11 = new System.Windows.Forms.Label();
this.label16 = new System.Windows.Forms.Label();
this.projectsList = new System.Windows.Forms.ListBox();
this.requirementsList = new System.Windows.Forms.ListBox();
this.label17 = new System.Windows.Forms.Label();
this.cbSubmit = new System.Windows.Forms.CheckBox();
this.cbDue = new System.Windows.Forms.CheckBox();
this.cbTurnAround = new System.Windows.Forms.CheckBox();
this.cbStart = new System.Windows.Forms.CheckBox();
this.cbPayment = new System.Windows.Forms.CheckBox();
this.grantorWebsiteText = new System.Windows.Forms.TextBox();
this.label18 = new System.Windows.Forms.Label();
this.label19 = new System.Windows.Forms.Label();
this.attachmentsList = new System.Windows.Forms.ListBox();
this.addAttachmentButton = new System.Windows.Forms.Button();
this.editAttachmentButton = new System.Windows.Forms.Button();
this.addProjectButton = new System.Windows.Forms.Button();
this.deleteProjectButton = new System.Windows.Forms.Button();
this.deleteProgramButton = new System.Windows.Forms.Button();
this.addProgramButton = new System.Windows.Forms.Button();
this.deleteDocTypeButton = new System.Windows.Forms.Button();
this.addDocTypeButton = new System.Windows.Forms.Button();
this.grantNotesText = new GrantApp.TextBoxWithExpandButton();
this.emphasisText = new GrantApp.TextBoxWithExpandButton();
this.statusDropdown = new GrantApp.BetterComboBox();
this.grantWriterDropdown = new GrantApp.BetterComboBox();
this.grantorDropdown = new GrantApp.BetterComboBox();
this.SuspendLayout();
//
// lblGrantor
//
this.lblGrantor.AutoSize = true;
this.lblGrantor.Location = new System.Drawing.Point(12, 72);
this.lblGrantor.Name = "lblGrantor";
this.lblGrantor.Size = new System.Drawing.Size(45, 13);
this.lblGrantor.TabIndex = 4;
this.lblGrantor.Text = "Grantor:";
//
// btnCancel
//
this.btnCancel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.btnCancel.CausesValidation = false;
this.btnCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.btnCancel.Location = new System.Drawing.Point(742, 582);
this.btnCancel.Name = "btnCancel";
this.btnCancel.Size = new System.Drawing.Size(80, 23);
this.btnCancel.TabIndex = 57;
this.btnCancel.Text = "Cancel";
this.btnCancel.UseVisualStyleBackColor = true;
//
// grantSubmitButton
//
this.grantSubmitButton.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.grantSubmitButton.DialogResult = System.Windows.Forms.DialogResult.OK;
this.grantSubmitButton.Location = new System.Drawing.Point(656, 582);
this.grantSubmitButton.Name = "grantSubmitButton";
this.grantSubmitButton.Size = new System.Drawing.Size(80, 23);
this.grantSubmitButton.TabIndex = 56;
this.grantSubmitButton.Text = "Submit";
this.grantSubmitButton.UseVisualStyleBackColor = true;
this.grantSubmitButton.Click += new System.EventHandler(this.grantSubmitButton_Click);
//
// lblName
//
this.lblName.AutoSize = true;
this.lblName.Location = new System.Drawing.Point(12, 9);
this.lblName.Name = "lblName";
this.lblName.Size = new System.Drawing.Size(38, 13);
this.lblName.TabIndex = 0;
this.lblName.Text = "Name:";
//
// grantNameText
//
this.grantNameText.Location = new System.Drawing.Point(78, 6);
this.grantNameText.Name = "grantNameText";
this.grantNameText.Size = new System.Drawing.Size(241, 20);
this.grantNameText.TabIndex = 1;
//
// addGrantorButton
//
this.addGrantorButton.Location = new System.Drawing.Point(216, 105);
this.addGrantorButton.Name = "addGrantorButton";
this.addGrantorButton.Size = new System.Drawing.Size(103, 23);
this.addGrantorButton.TabIndex = 6;
this.addGrantorButton.Text = "Add New Grantor";
this.addGrantorButton.UseVisualStyleBackColor = true;
//
// label5
//
this.label5.AutoSize = true;
this.label5.Location = new System.Drawing.Point(12, 335);
this.label5.Name = "label5";
this.label5.Size = new System.Drawing.Size(101, 13);
this.label5.TabIndex = 22;
this.label5.Text = "Requested Amount:";
//
// requestedAmountText
//
this.requestedAmountText.Location = new System.Drawing.Point(119, 332);
this.requestedAmountText.Name = "requestedAmountText";
this.requestedAmountText.Size = new System.Drawing.Size(200, 20);
this.requestedAmountText.TabIndex = 23;
this.requestedAmountText.Text = "0.00";
//
// label6
//
this.label6.AutoSize = true;
this.label6.Location = new System.Drawing.Point(12, 362);
this.label6.Name = "label6";
this.label6.Size = new System.Drawing.Size(76, 13);
this.label6.TabIndex = 24;
this.label6.Text = "Actual Amount";
//
// actualAmountText
//
this.actualAmountText.Location = new System.Drawing.Point(119, 359);
this.actualAmountText.Name = "actualAmountText";
this.actualAmountText.Size = new System.Drawing.Size(200, 20);
this.actualAmountText.TabIndex = 25;
//
// label1
//
this.label1.AutoSize = true;
this.label1.Location = new System.Drawing.Point(12, 42);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(63, 13);
this.label1.TabIndex = 2;
this.label1.Text = "Description:";
//
// grantDescText
//
this.grantDescText.Location = new System.Drawing.Point(78, 39);
this.grantDescText.Name = "grantDescText";
this.grantDescText.Size = new System.Drawing.Size(241, 20);
this.grantDescText.TabIndex = 3;
//
// label2
//
this.label2.AutoSize = true;
this.label2.Location = new System.Drawing.Point(12, 147);
this.label2.Name = "label2";
this.label2.Size = new System.Drawing.Size(68, 13);
this.label2.TabIndex = 7;
this.label2.Text = "Submit Date:";
//
// submitDatePicker
//
this.submitDatePicker.Enabled = false;
this.submitDatePicker.Location = new System.Drawing.Point(108, 147);
this.submitDatePicker.Name = "submitDatePicker";
this.submitDatePicker.Size = new System.Drawing.Size(211, 20);
this.submitDatePicker.TabIndex = 8;
//
// label3
//
this.label3.AutoSize = true;
this.label3.Location = new System.Drawing.Point(12, 182);
this.label3.Name = "label3";
this.label3.Size = new System.Drawing.Size(56, 13);
this.label3.TabIndex = 10;
this.label3.Text = "Due Date:";
//
// label4
//
this.label4.AutoSize = true;
this.label4.Location = new System.Drawing.Point(12, 213);
this.label4.Name = "label4";
this.label4.Size = new System.Drawing.Size(91, 13);
this.label4.TabIndex = 13;
this.label4.Text = "Turnaround Date:";
//
// label7
//
this.label7.AutoSize = true;
this.label7.Location = new System.Drawing.Point(12, 244);
this.label7.Name = "label7";
this.label7.Size = new System.Drawing.Size(58, 13);
this.label7.TabIndex = 16;
this.label7.Text = "Start Date:";
//
// label8
//
this.label8.AutoSize = true;
this.label8.Location = new System.Drawing.Point(12, 277);
this.label8.Name = "label8";
this.label8.Size = new System.Drawing.Size(77, 13);
this.label8.TabIndex = 19;
this.label8.Text = "Payment Date:";
//
// dueDatePicker
//
this.dueDatePicker.Enabled = false;
this.dueDatePicker.Location = new System.Drawing.Point(108, 182);
this.dueDatePicker.Name = "dueDatePicker";
this.dueDatePicker.Size = new System.Drawing.Size(211, 20);
this.dueDatePicker.TabIndex = 11;
//
// turnaroundPicker
//
this.turnaroundPicker.Enabled = false;
this.turnaroundPicker.Location = new System.Drawing.Point(109, 213);
this.turnaroundPicker.Name = "turnaroundPicker";
this.turnaroundPicker.Size = new System.Drawing.Size(210, 20);
this.turnaroundPicker.TabIndex = 14;
//
// startDatePicker
//
this.startDatePicker.Enabled = false;
this.startDatePicker.Location = new System.Drawing.Point(108, 244);
this.startDatePicker.Name = "startDatePicker";
this.startDatePicker.Size = new System.Drawing.Size(211, 20);
this.startDatePicker.TabIndex = 17;
//
// paymentDatePicker
//
this.paymentDatePicker.Enabled = false;
this.paymentDatePicker.Location = new System.Drawing.Point(108, 277);
this.paymentDatePicker.Name = "paymentDatePicker";
this.paymentDatePicker.Size = new System.Drawing.Size(211, 20);
this.paymentDatePicker.TabIndex = 20;
//
// label9
//
this.label9.AutoSize = true;
this.label9.Location = new System.Drawing.Point(15, 492);
this.label9.Name = "label9";
this.label9.Size = new System.Drawing.Size(100, 13);
this.label9.TabIndex = 30;
this.label9.Text = "Website Username:";
//
// label10
//
this.label10.AutoSize = true;
this.label10.Location = new System.Drawing.Point(15, 517);
this.label10.Name = "label10";
this.label10.Size = new System.Drawing.Size(98, 13);
this.label10.TabIndex = 32;
this.label10.Text = "Website Password:";
//
// label14
//
this.label14.AutoSize = true;
this.label14.Location = new System.Drawing.Point(592, 147);
this.label14.Name = "label14";
this.label14.Size = new System.Drawing.Size(97, 13);
this.label14.TabIndex = 48;
this.label14.Text = "Program Emphasis:";
//
// label13
//
this.label13.AutoSize = true;
this.label13.Location = new System.Drawing.Point(347, 362);
this.label13.Name = "label13";
this.label13.Size = new System.Drawing.Size(38, 13);
this.label13.TabIndex = 54;
this.label13.Text = "Notes:";
//
// label12
//
this.label12.AutoSize = true;
this.label12.Location = new System.Drawing.Point(15, 555);
this.label12.Name = "label12";
this.label12.Size = new System.Drawing.Size(67, 13);
this.label12.TabIndex = 34;
this.label12.Text = "Grant Writer:";
//
// grantorUsernameText
//
this.grantorUsernameText.Location = new System.Drawing.Point(119, 489);
this.grantorUsernameText.Name = "grantorUsernameText";
this.grantorUsernameText.Size = new System.Drawing.Size(200, 20);
this.grantorUsernameText.TabIndex = 31;
//
// grantorPasswordText
//
this.grantorPasswordText.Location = new System.Drawing.Point(119, 514);
this.grantorPasswordText.Name = "grantorPasswordText";
this.grantorPasswordText.Size = new System.Drawing.Size(200, 20);
this.grantorPasswordText.TabIndex = 33;
//
// label15
//
this.label15.AutoSize = true;
this.label15.Location = new System.Drawing.Point(15, 410);
this.label15.Name = "label15";
this.label15.Size = new System.Drawing.Size(40, 13);
this.label15.TabIndex = 26;
this.label15.Text = "Status:";
//
// programsList
//
this.programsList.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.programsList.FormattingEnabled = true;
this.programsList.Location = new System.Drawing.Point(595, 26);
this.programsList.Name = "programsList";
this.programsList.SelectionMode = System.Windows.Forms.SelectionMode.MultiSimple;
this.programsList.Size = new System.Drawing.Size(227, 82);
this.programsList.Sorted = true;
this.programsList.TabIndex = 45;
this.programsList.SelectedIndexChanged += new System.EventHandler(this.programsList_SelectedIndexChanged);
//
// label11
//
this.label11.AutoSize = true;
this.label11.Location = new System.Drawing.Point(592, 9);
this.label11.Name = "label11";
this.label11.Size = new System.Drawing.Size(54, 13);
this.label11.TabIndex = 44;
this.label11.Text = "Programs:";
//
// label16
//
this.label16.AutoSize = true;
this.label16.Location = new System.Drawing.Point(347, 9);
this.label16.Name = "label16";
this.label16.Size = new System.Drawing.Size(48, 13);
this.label16.TabIndex = 36;
this.label16.Text = "Projects:";
//
// projectsList
//
this.projectsList.FormattingEnabled = true;
this.projectsList.Location = new System.Drawing.Point(350, 26);
this.projectsList.Name = "projectsList";
this.projectsList.SelectionMode = System.Windows.Forms.SelectionMode.MultiSimple;
this.projectsList.Size = new System.Drawing.Size(229, 121);
this.projectsList.Sorted = true;
this.projectsList.TabIndex = 37;
this.projectsList.SelectedIndexChanged += new System.EventHandler(this.projectsList_SelectedIndexChanged);
//
// requirementsList
//
this.requirementsList.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.requirementsList.FormattingEnabled = true;
this.requirementsList.Location = new System.Drawing.Point(595, 241);
this.requirementsList.Name = "requirementsList";
this.requirementsList.SelectionMode = System.Windows.Forms.SelectionMode.MultiSimple;
this.requirementsList.Size = new System.Drawing.Size(227, 95);
this.requirementsList.Sorted = true;
this.requirementsList.TabIndex = 51;
this.requirementsList.SelectedIndexChanged += new System.EventHandler(this.documentationList_SelectedIndexChanged);
//
// label17
//
this.label17.AutoSize = true;
this.label17.Location = new System.Drawing.Point(592, 225);
this.label17.Name = "label17";
this.label17.Size = new System.Drawing.Size(75, 13);
this.label17.TabIndex = 50;
this.label17.Text = "Requirements:";
//
// cbSubmit
//
this.cbSubmit.AutoSize = true;
this.cbSubmit.Location = new System.Drawing.Point(326, 149);
this.cbSubmit.Name = "cbSubmit";
this.cbSubmit.Size = new System.Drawing.Size(15, 14);
this.cbSubmit.TabIndex = 9;
this.cbSubmit.UseVisualStyleBackColor = true;
this.cbSubmit.CheckedChanged += new System.EventHandler(this.cbSubmit_CheckedChanged);
//
// cbDue
//
this.cbDue.AutoSize = true;
this.cbDue.Location = new System.Drawing.Point(325, 185);
this.cbDue.Name = "cbDue";
this.cbDue.Size = new System.Drawing.Size(15, 14);
this.cbDue.TabIndex = 12;
this.cbDue.UseVisualStyleBackColor = true;
this.cbDue.CheckedChanged += new System.EventHandler(this.cbDue_CheckedChanged);
//
// cbTurnAround
//
this.cbTurnAround.AutoSize = true;
this.cbTurnAround.Location = new System.Drawing.Point(326, 216);
this.cbTurnAround.Name = "cbTurnAround";
this.cbTurnAround.Size = new System.Drawing.Size(15, 14);
this.cbTurnAround.TabIndex = 15;
this.cbTurnAround.UseVisualStyleBackColor = true;
this.cbTurnAround.CheckedChanged += new System.EventHandler(this.cbTurnAround_CheckedChanged);
//
// cbStart
//
this.cbStart.AutoSize = true;
this.cbStart.Location = new System.Drawing.Point(325, 247);
this.cbStart.Name = "cbStart";
this.cbStart.Size = new System.Drawing.Size(15, 14);
this.cbStart.TabIndex = 18;
this.cbStart.UseVisualStyleBackColor = true;
this.cbStart.CheckedChanged += new System.EventHandler(this.cbStart_CheckedChanged);
//
// cbPayment
//
this.cbPayment.AutoSize = true;
this.cbPayment.Location = new System.Drawing.Point(326, 280);
this.cbPayment.Name = "cbPayment";
this.cbPayment.Size = new System.Drawing.Size(15, 14);
this.cbPayment.TabIndex = 21;
this.cbPayment.UseVisualStyleBackColor = true;
this.cbPayment.CheckedChanged += new System.EventHandler(this.cbPayment_CheckedChanged);
//
// grantorWebsiteText
//
this.grantorWebsiteText.Location = new System.Drawing.Point(119, 463);
this.grantorWebsiteText.Name = "grantorWebsiteText";
this.grantorWebsiteText.Size = new System.Drawing.Size(200, 20);
this.grantorWebsiteText.TabIndex = 29;
//
// label18
//
this.label18.AutoSize = true;
this.label18.Location = new System.Drawing.Point(15, 466);
this.label18.Name = "label18";
this.label18.Size = new System.Drawing.Size(49, 13);
this.label18.TabIndex = 28;
this.label18.Text = "Website:";
//
// label19
//
this.label19.AutoSize = true;
this.label19.Location = new System.Drawing.Point(348, 186);
this.label19.Name = "label19";
this.label19.Size = new System.Drawing.Size(69, 13);
this.label19.TabIndex = 40;
this.label19.Text = "Attachments:";
//
// attachmentsList
//
this.attachmentsList.FormattingEnabled = true;
this.attachmentsList.Location = new System.Drawing.Point(351, 202);
this.attachmentsList.Name = "attachmentsList";
this.attachmentsList.Size = new System.Drawing.Size(229, 134);
this.attachmentsList.Sorted = true;
this.attachmentsList.TabIndex = 41;
this.attachmentsList.SelectedIndexChanged += new System.EventHandler(this.attachmentsList_SelectedIndexChanged);
//
// addAttachmentButton
//
this.addAttachmentButton.Location = new System.Drawing.Point(532, 342);
this.addAttachmentButton.Name = "addAttachmentButton";
this.addAttachmentButton.Size = new System.Drawing.Size(48, 23);
this.addAttachmentButton.TabIndex = 43;
this.addAttachmentButton.Text = "Add";
this.addAttachmentButton.UseVisualStyleBackColor = true;
this.addAttachmentButton.Click += new System.EventHandler(this.addAttachmentButton_Click);
//
// editAttachmentButton
//
this.editAttachmentButton.Enabled = false;
this.editAttachmentButton.Location = new System.Drawing.Point(478, 342);
this.editAttachmentButton.Name = "editAttachmentButton";
this.editAttachmentButton.Size = new System.Drawing.Size(48, 23);
this.editAttachmentButton.TabIndex = 42;
this.editAttachmentButton.Text = "Edit";
this.editAttachmentButton.UseVisualStyleBackColor = true;
this.editAttachmentButton.Click += new System.EventHandler(this.editAttachmentButton_Click);
//
// addProjectButton
//
this.addProjectButton.Location = new System.Drawing.Point(531, 153);
this.addProjectButton.Name = "addProjectButton";
this.addProjectButton.Size = new System.Drawing.Size(48, 23);
this.addProjectButton.TabIndex = 39;
this.addProjectButton.Text = "Add";
this.addProjectButton.UseVisualStyleBackColor = true;
this.addProjectButton.Click += new System.EventHandler(this.addProjectButton_Click);
//
// deleteProjectButton
//
this.deleteProjectButton.Enabled = false;
this.deleteProjectButton.Location = new System.Drawing.Point(477, 153);
this.deleteProjectButton.Name = "deleteProjectButton";
this.deleteProjectButton.Size = new System.Drawing.Size(48, 23);
this.deleteProjectButton.TabIndex = 38;
this.deleteProjectButton.Text = "Delete";
this.deleteProjectButton.UseVisualStyleBackColor = true;
this.deleteProjectButton.Click += new System.EventHandler(this.deleteProjectButton_Click);
//
// deleteProgramButton
//
this.deleteProgramButton.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.deleteProgramButton.Enabled = false;
this.deleteProgramButton.Location = new System.Drawing.Point(720, 114);
this.deleteProgramButton.Name = "deleteProgramButton";
this.deleteProgramButton.Size = new System.Drawing.Size(48, 23);
this.deleteProgramButton.TabIndex = 46;
this.deleteProgramButton.Text = "Delete";
this.deleteProgramButton.UseVisualStyleBackColor = true;
this.deleteProgramButton.Click += new System.EventHandler(this.deleteProgramButton_Click);
//
// addProgramButton
//
this.addProgramButton.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.addProgramButton.Location = new System.Drawing.Point(774, 114);
this.addProgramButton.Name = "addProgramButton";
this.addProgramButton.Size = new System.Drawing.Size(48, 23);
this.addProgramButton.TabIndex = 47;
this.addProgramButton.Text = "Add";
this.addProgramButton.UseVisualStyleBackColor = true;
this.addProgramButton.Click += new System.EventHandler(this.addProgramButton_Click);
//
// deleteDocTypeButton
//
this.deleteDocTypeButton.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.deleteDocTypeButton.Enabled = false;
this.deleteDocTypeButton.Location = new System.Drawing.Point(720, 342);
this.deleteDocTypeButton.Name = "deleteDocTypeButton";
this.deleteDocTypeButton.Size = new System.Drawing.Size(48, 23);
this.deleteDocTypeButton.TabIndex = 52;
this.deleteDocTypeButton.Text = "Delete";
this.deleteDocTypeButton.UseVisualStyleBackColor = true;
this.deleteDocTypeButton.Click += new System.EventHandler(this.deleteDocTypeButton_Click);
//
// addDocTypeButton
//
this.addDocTypeButton.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.addDocTypeButton.Location = new System.Drawing.Point(774, 342);
this.addDocTypeButton.Name = "addDocTypeButton";
this.addDocTypeButton.Size = new System.Drawing.Size(48, 23);
this.addDocTypeButton.TabIndex = 53;
this.addDocTypeButton.Text = "Add";
this.addDocTypeButton.UseVisualStyleBackColor = true;
this.addDocTypeButton.Click += new System.EventHandler(this.addDocTypeButton_Click);
//
// grantNotesText
//
this.grantNotesText.AcceptsReturn = false;
this.grantNotesText.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.grantNotesText.Location = new System.Drawing.Point(351, 378);
this.grantNotesText.Multiline = true;
this.grantNotesText.Name = "grantNotesText";
this.grantNotesText.ReadOnly = false;
this.grantNotesText.ScrollBars = System.Windows.Forms.ScrollBars.None;
this.grantNotesText.Size = new System.Drawing.Size(471, 198);
this.grantNotesText.TabIndex = 55;
//
// emphasisText
//
this.emphasisText.AcceptsReturn = false;
this.emphasisText.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.emphasisText.Location = new System.Drawing.Point(595, 163);
this.emphasisText.Multiline = true;
this.emphasisText.Name = "emphasisText";
this.emphasisText.ReadOnly = false;
this.emphasisText.ScrollBars = System.Windows.Forms.ScrollBars.None;
this.emphasisText.Size = new System.Drawing.Size(227, 59);
this.emphasisText.TabIndex = 49;
//
// statusDropdown
//
this.statusDropdown.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.statusDropdown.FormattingEnabled = true;
this.statusDropdown.Location = new System.Drawing.Point(119, 407);
this.statusDropdown.Name = "statusDropdown";
this.statusDropdown.Size = new System.Drawing.Size(200, 21);
this.statusDropdown.TabIndex = 27;
//
// grantWriterDropdown
//
this.grantWriterDropdown.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.grantWriterDropdown.FormattingEnabled = true;
this.grantWriterDropdown.Location = new System.Drawing.Point(119, 552);
this.grantWriterDropdown.Name = "grantWriterDropdown";
this.grantWriterDropdown.Size = new System.Drawing.Size(200, 21);
this.grantWriterDropdown.TabIndex = 35;
//
// grantorDropdown
//
this.grantorDropdown.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.grantorDropdown.FormattingEnabled = true;
this.grantorDropdown.Location = new System.Drawing.Point(78, 69);
this.grantorDropdown.Name = "grantorDropdown";
this.grantorDropdown.Size = new System.Drawing.Size(241, 21);
this.grantorDropdown.TabIndex = 5;
//
// AddGrant
//
this.AcceptButton = this.grantSubmitButton;
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.AutoValidate = System.Windows.Forms.AutoValidate.EnableAllowFocusChange;
this.CancelButton = this.btnCancel;
this.ClientSize = new System.Drawing.Size(834, 617);
this.Controls.Add(this.grantNotesText);
this.Controls.Add(this.emphasisText);
this.Controls.Add(this.deleteDocTypeButton);
this.Controls.Add(this.addDocTypeButton);
this.Controls.Add(this.deleteProgramButton);
this.Controls.Add(this.addProgramButton);
this.Controls.Add(this.deleteProjectButton);
this.Controls.Add(this.addProjectButton);
this.Controls.Add(this.editAttachmentButton);
this.Controls.Add(this.addAttachmentButton);
this.Controls.Add(this.attachmentsList);
this.Controls.Add(this.label19);
this.Controls.Add(this.grantSubmitButton);
this.Controls.Add(this.btnCancel);
this.Controls.Add(this.grantorWebsiteText);
this.Controls.Add(this.label18);
this.Controls.Add(this.cbPayment);
this.Controls.Add(this.cbStart);
this.Controls.Add(this.cbTurnAround);
this.Controls.Add(this.cbDue);
this.Controls.Add(this.cbSubmit);
this.Controls.Add(this.label17);
this.Controls.Add(this.requirementsList);
this.Controls.Add(this.projectsList);
this.Controls.Add(this.label16);
this.Controls.Add(this.label11);
this.Controls.Add(this.programsList);
this.Controls.Add(this.statusDropdown);
this.Controls.Add(this.label15);
this.Controls.Add(this.grantWriterDropdown);
this.Controls.Add(this.grantorPasswordText);
this.Controls.Add(this.grantorUsernameText);
this.Controls.Add(this.label14);
this.Controls.Add(this.label13);
this.Controls.Add(this.label12);
this.Controls.Add(this.label10);
this.Controls.Add(this.label9);
this.Controls.Add(this.paymentDatePicker);
this.Controls.Add(this.startDatePicker);
this.Controls.Add(this.turnaroundPicker);
this.Controls.Add(this.dueDatePicker);
this.Controls.Add(this.label8);
this.Controls.Add(this.label7);
this.Controls.Add(this.label4);
this.Controls.Add(this.label3);
this.Controls.Add(this.submitDatePicker);
this.Controls.Add(this.label2);
this.Controls.Add(this.grantDescText);
this.Controls.Add(this.label1);
this.Controls.Add(this.actualAmountText);
this.Controls.Add(this.label6);
this.Controls.Add(this.requestedAmountText);
this.Controls.Add(this.label5);
this.Controls.Add(this.addGrantorButton);
this.Controls.Add(this.grantNameText);
this.Controls.Add(this.lblName);
this.Controls.Add(this.lblGrantor);
this.Controls.Add(this.grantorDropdown);
this.Name = "AddGrant";
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "Grant Editor";
this.Load += new System.EventHandler(this.AddGrant_Load);
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
private System.Windows.Forms.Label lblGrantor;
private System.Windows.Forms.Button btnCancel;
private System.Windows.Forms.Button grantSubmitButton;
private System.Windows.Forms.Label lblName;
private System.Windows.Forms.TextBox grantNameText;
private System.Windows.Forms.Button addGrantorButton;
private BetterComboBox grantorDropdown;
private System.Windows.Forms.Label label5;
private System.Windows.Forms.TextBox requestedAmountText;
private System.Windows.Forms.Label label6;
private System.Windows.Forms.TextBox actualAmountText;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.TextBox grantDescText;
private System.Windows.Forms.Label label2;
private System.Windows.Forms.DateTimePicker submitDatePicker;
private System.Windows.Forms.Label label3;
private System.Windows.Forms.Label label4;
private System.Windows.Forms.Label label7;
private System.Windows.Forms.Label label8;
private System.Windows.Forms.DateTimePicker dueDatePicker;
private System.Windows.Forms.DateTimePicker turnaroundPicker;
private System.Windows.Forms.DateTimePicker startDatePicker;
private System.Windows.Forms.DateTimePicker paymentDatePicker;
private System.Windows.Forms.Label label9;
private System.Windows.Forms.Label label10;
private System.Windows.Forms.Label label14;
private System.Windows.Forms.Label label13;
private System.Windows.Forms.Label label12;
private System.Windows.Forms.TextBox grantorUsernameText;
private System.Windows.Forms.TextBox grantorPasswordText;
private BetterComboBox grantWriterDropdown;
private System.Windows.Forms.Label label15;
private BetterComboBox statusDropdown;
private System.Windows.Forms.ListBox programsList;
private System.Windows.Forms.Label label11;
private System.Windows.Forms.Label label16;
private System.Windows.Forms.ListBox projectsList;
private System.Windows.Forms.ListBox requirementsList;
private System.Windows.Forms.Label label17;
private System.Windows.Forms.CheckBox cbSubmit;
private System.Windows.Forms.CheckBox cbDue;
private System.Windows.Forms.CheckBox cbTurnAround;
private System.Windows.Forms.CheckBox cbStart;
private System.Windows.Forms.CheckBox cbPayment;
private System.Windows.Forms.TextBox grantorWebsiteText;
private System.Windows.Forms.Label label18;
private System.Windows.Forms.Label label19;
private System.Windows.Forms.ListBox attachmentsList;
private System.Windows.Forms.Button addAttachmentButton;
private System.Windows.Forms.Button editAttachmentButton;
private System.Windows.Forms.Button addProjectButton;
private System.Windows.Forms.Button deleteProjectButton;
private System.Windows.Forms.Button deleteProgramButton;
private System.Windows.Forms.Button addProgramButton;
private System.Windows.Forms.Button deleteDocTypeButton;
private System.Windows.Forms.Button addDocTypeButton;
private TextBoxWithExpandButton emphasisText;
private TextBoxWithExpandButton grantNotesText;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Net.Security;
using System.Runtime.InteropServices;
using System.Security.Authentication.ExtendedProtection;
namespace System.Net
{
internal partial class NTAuthentication
{
private bool _isServer;
private SafeFreeCredentials _credentialsHandle;
private SafeDeleteContext _securityContext;
private string _spn;
private int _tokenSize;
private ContextFlagsPal _requestedContextFlags;
private ContextFlagsPal _contextFlags;
private bool _isCompleted;
private string _package;
private string _lastProtocolName;
private string _protocolName;
private string _clientSpecifiedSpn;
private ChannelBinding _channelBinding;
// If set, no more calls should be made.
internal bool IsCompleted => _isCompleted;
internal bool IsValidContext => !(_securityContext == null || _securityContext.IsInvalid);
internal string Package => _package;
// True indicates this instance is for Server and will use AcceptSecurityContext SSPI API.
internal bool IsServer => _isServer;
internal string ClientSpecifiedSpn
{
get
{
if (_clientSpecifiedSpn == null)
{
_clientSpecifiedSpn = GetClientSpecifiedSpn();
}
return _clientSpecifiedSpn;
}
}
internal string ProtocolName
{
get
{
// Note: May return string.Empty if the auth is not done yet or failed.
if (_protocolName == null)
{
string negotiationAuthenticationPackage = null;
if (IsValidContext)
{
negotiationAuthenticationPackage = NegotiateStreamPal.QueryContextAuthenticationPackage(_securityContext);
if (IsCompleted)
{
_protocolName = negotiationAuthenticationPackage;
}
}
return negotiationAuthenticationPackage ?? string.Empty;
}
return _protocolName;
}
}
internal bool IsKerberos
{
get
{
if (_lastProtocolName == null)
{
_lastProtocolName = ProtocolName;
}
return (object)_lastProtocolName == (object)NegotiationInfoClass.Kerberos;
}
}
//
// This overload does not attempt to impersonate because the caller either did it already or the original thread context is still preserved.
//
internal NTAuthentication(bool isServer, string package, NetworkCredential credential, string spn, ContextFlagsPal requestedContextFlags, ChannelBinding channelBinding)
{
Initialize(isServer, package, credential, spn, requestedContextFlags, channelBinding);
}
private void Initialize(bool isServer, string package, NetworkCredential credential, string spn, ContextFlagsPal requestedContextFlags, ChannelBinding channelBinding)
{
if (NetEventSource.IsEnabled) NetEventSource.Enter(this, package, spn, requestedContextFlags);
_tokenSize = NegotiateStreamPal.QueryMaxTokenSize(package);
_isServer = isServer;
_spn = spn;
_securityContext = null;
_requestedContextFlags = requestedContextFlags;
_package = package;
_channelBinding = channelBinding;
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"Peer SPN-> '{_spn}'");
//
// Check if we're using DefaultCredentials.
//
Debug.Assert(CredentialCache.DefaultCredentials == CredentialCache.DefaultNetworkCredentials);
if (credential == CredentialCache.DefaultCredentials)
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "using DefaultCredentials");
_credentialsHandle = NegotiateStreamPal.AcquireDefaultCredential(package, _isServer);
}
else
{
_credentialsHandle = NegotiateStreamPal.AcquireCredentialsHandle(package, _isServer, credential);
}
}
internal SafeDeleteContext GetContext(out SecurityStatusPal status)
{
status = new SecurityStatusPal(SecurityStatusPalErrorCode.OK);
if (!(IsCompleted && IsValidContext))
{
NetEventSource.Fail(this, "Should be called only when completed with success, currently is not!");
}
if (!IsServer)
{
NetEventSource.Fail(this, "The method must not be called by the client side!");
}
if (!IsValidContext)
{
status = new SecurityStatusPal(SecurityStatusPalErrorCode.InvalidHandle);
return null;
}
return _securityContext;
}
internal void CloseContext()
{
if (_securityContext != null && !_securityContext.IsClosed)
{
_securityContext.Dispose();
}
}
internal int VerifySignature(byte[] buffer, int offset, int count)
{
return NegotiateStreamPal.VerifySignature(_securityContext, buffer, offset, count);
}
internal int MakeSignature(byte[] buffer, int offset, int count, ref byte[] output)
{
return NegotiateStreamPal.MakeSignature(_securityContext, buffer, offset, count, ref output);
}
internal string GetOutgoingBlob(string incomingBlob)
{
byte[] decodedIncomingBlob = null;
if (incomingBlob != null && incomingBlob.Length > 0)
{
decodedIncomingBlob = Convert.FromBase64String(incomingBlob);
}
byte[] decodedOutgoingBlob = null;
if ((IsValidContext || IsCompleted) && decodedIncomingBlob == null)
{
// we tried auth previously, now we got a null blob, we're done. this happens
// with Kerberos & valid credentials on the domain but no ACLs on the resource
_isCompleted = true;
}
else
{
SecurityStatusPal statusCode;
decodedOutgoingBlob = GetOutgoingBlob(decodedIncomingBlob, true, out statusCode);
}
string outgoingBlob = null;
if (decodedOutgoingBlob != null && decodedOutgoingBlob.Length > 0)
{
outgoingBlob = Convert.ToBase64String(decodedOutgoingBlob);
}
if (IsCompleted)
{
CloseContext();
}
return outgoingBlob;
}
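// Usage sketch (illustrative): a typical challenge/response loop over base64 blobs, e.g. for an
// HTTP Negotiate exchange. SendChallenge is a hypothetical transport call, not part of this class.
//   string outgoing = auth.GetOutgoingBlob(null);        // first leg, no server challenge yet
//   while (!auth.IsCompleted)
//   {
//       string challenge = SendChallenge(outgoing);      // hypothetical: send blob, read response
//       outgoing = auth.GetOutgoingBlob(challenge);
//   }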
internal byte[] GetOutgoingBlob(byte[] incomingBlob, bool throwOnError)
{
SecurityStatusPal statusCode;
return GetOutgoingBlob(incomingBlob, throwOnError, out statusCode);
}
// Accepts an incoming binary security blob and returns an outgoing binary security blob.
internal byte[] GetOutgoingBlob(byte[] incomingBlob, bool throwOnError, out SecurityStatusPal statusCode)
{
if (NetEventSource.IsEnabled) NetEventSource.Enter(this, incomingBlob);
var result = new byte[_tokenSize];
bool firstTime = _securityContext == null;
try
{
if (!_isServer)
{
// client session
statusCode = NegotiateStreamPal.InitializeSecurityContext(
ref _credentialsHandle,
ref _securityContext,
_spn,
_requestedContextFlags,
incomingBlob,
_channelBinding,
ref result,
ref _contextFlags);
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"SSPIWrapper.InitializeSecurityContext() returns statusCode:0x{((int)statusCode.ErrorCode):x8} ({statusCode})");
if (statusCode.ErrorCode == SecurityStatusPalErrorCode.CompleteNeeded)
{
statusCode = NegotiateStreamPal.CompleteAuthToken(ref _securityContext, result);
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"SSPIWrapper.CompleteAuthToken() returns statusCode:0x{((int)statusCode.ErrorCode):x8} ({statusCode})");
result = null;
}
}
else
{
// Server session.
statusCode = NegotiateStreamPal.AcceptSecurityContext(
_credentialsHandle,
ref _securityContext,
_requestedContextFlags,
incomingBlob,
_channelBinding,
ref result,
ref _contextFlags);
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"SSPIWrapper.AcceptSecurityContext() returns statusCode:0x{((int)statusCode.ErrorCode):x8} ({statusCode})");
}
}
finally
{
//
// Assuming the ISC or ASC has referenced the credential on the first successful call,
// we want to decrement the effective ref count by "disposing" it.
// The real dispose will happen when the security context is closed.
// Note if the first call was not successful the handle is physically destroyed here.
//
if (firstTime)
{
_credentialsHandle?.Dispose();
}
}
if (((int)statusCode.ErrorCode >= (int)SecurityStatusPalErrorCode.OutOfMemory))
{
CloseContext();
_isCompleted = true;
if (throwOnError)
{
Exception exception = NegotiateStreamPal.CreateExceptionFromError(statusCode);
if (NetEventSource.IsEnabled) NetEventSource.Exit(this, exception);
throw exception;
}
if (NetEventSource.IsEnabled) NetEventSource.Exit(this, $"null statusCode:0x{((int)statusCode.ErrorCode):x8} ({statusCode})");
return null;
}
else if (firstTime && _credentialsHandle != null)
{
// Cache until it is pushed out by newly incoming handles.
SSPIHandleCache.CacheCredential(_credentialsHandle);
}
// The return value will tell us correctly if the handshake is over or not
if (statusCode.ErrorCode == SecurityStatusPalErrorCode.OK)
{
// Success.
_isCompleted = true;
}
else if (NetEventSource.IsEnabled)
{
// We need to continue.
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"need continue statusCode:0x{((int)statusCode.ErrorCode):x8} ({statusCode}) _securityContext:{_securityContext}");
}
if (NetEventSource.IsEnabled)
{
if (NetEventSource.IsEnabled) NetEventSource.Exit(this, $"IsCompleted: {IsCompleted}");
}
return result;
}
private string GetClientSpecifiedSpn()
{
if (!(IsValidContext && IsCompleted))
{
NetEventSource.Fail(this, "Trying to get the client SPN before handshaking is done!");
}
string spn = NegotiateStreamPal.QueryContextClientSpecifiedSpn(_securityContext);
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"The client specified SPN is [{spn}]");
return spn;
}
}
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#region Import
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using ASC.Collections;
using ASC.Common.Data.Sql;
using ASC.Common.Data.Sql.Expressions;
using ASC.CRM.Core.Entities;
using ASC.Web.CRM.Classes;
using ASC.Web.CRM.Resources;
#endregion
namespace ASC.CRM.Core.Dao
{
public class CachedListItem : ListItemDao
{
#region Members
private readonly HttpRequestDictionary<ListItem> _listItemCache = new HttpRequestDictionary<ListItem>("crm_list_item");
#endregion
#region Constructor
public CachedListItem(int tenantID)
: base(tenantID)
{
}
#endregion
#region Members
public override void ChangeColor(int id, string newColor)
{
ResetCache(id);
base.ChangeColor(id, newColor);
}
public override void DeleteItem(ListType listType, int itemID, int toItemID)
{
ResetCache(itemID);
base.DeleteItem(listType, itemID, toItemID);
}
public override void ChangePicture(int id, string newPicture)
{
ResetCache(id);
base.ChangePicture(id, newPicture);
}
public override void EditItem(ListType listType, ListItem enumItem)
{
ResetCache(enumItem.ID);
base.EditItem(listType, enumItem);
}
public override void ReorderItems(ListType listType, string[] titles)
{
_listItemCache.Clear();
base.ReorderItems(listType, titles);
}
public override ListItem GetByID(int id)
{
return _listItemCache.Get(id.ToString(), () => GetByIDBase(id));
}
private ListItem GetByIDBase(int id)
{
return base.GetByID(id);
}
private void ResetCache(int id)
{
_listItemCache.Reset(id.ToString());
}
#endregion
}
public class ListItemDao : AbstractDao
{
#region Constructor
public ListItemDao(int tenantID)
: base(tenantID)
{
}
#endregion
public bool IsExist(ListType listType, String title)
{
var q = new SqlQuery("crm_list_item")
.Select("1")
.Where("tenant_id", TenantID)
.Where("list_type", (int)listType)
.Where("title", title)
.SetMaxResults(1);
return Db.ExecuteScalar<bool>(q);
}
public bool IsExist(int id)
{
return Db.ExecuteScalar<bool>("select exists(select 1 from crm_list_item where tenant_id = @tid and id = @id)",
new { tid = TenantID, id = id });
}
public List<ListItem> GetItems()
{
var sqlQuery = GetListItemSqlQuery(null).OrderBy("sort_order", true);
return Db.ExecuteList(sqlQuery).ConvertAll(ToListItem);
}
public List<ListItem> GetItems(ListType listType)
{
var sqlQuery = GetListItemSqlQuery(Exp.Eq("list_type", (int)listType))
.OrderBy("sort_order", true);
return Db.ExecuteList(sqlQuery).ConvertAll(ToListItem);
}
public int GetItemsCount(ListType listType)
{
SqlQuery sqlQuery = Query("crm_list_item").SelectCount().Where(Exp.Eq("list_type", (int)listType))
.OrderBy("sort_order", true);
return Db.ExecuteScalar<int>(sqlQuery);
}
public ListItem GetSystemListItem(int id)
{
switch (id)
{
case (int)HistoryCategorySystem.TaskClosed:
return new ListItem
{
ID = -1,
Title = HistoryCategorySystem.TaskClosed.ToLocalizedString(),
AdditionalParams = "event_category_close.png"
};
case (int)HistoryCategorySystem.FilesUpload:
return new ListItem
{
ID = -2,
Title = HistoryCategorySystem.FilesUpload.ToLocalizedString(),
AdditionalParams = "event_category_attach_file.png"
};
case (int)HistoryCategorySystem.MailMessage:
return new ListItem
{
ID = -3,
Title = HistoryCategorySystem.MailMessage.ToLocalizedString(),
AdditionalParams = "event_category_email.png"
};
default:
return null;
}
}
public List<ListItem> GetSystemItems()
{
return new List<ListItem>
{
new ListItem
{
ID = (int)HistoryCategorySystem.TaskClosed,
Title = HistoryCategorySystem.TaskClosed.ToLocalizedString(),
AdditionalParams = "event_category_close.png"
},
new ListItem
{
ID = (int)HistoryCategorySystem.FilesUpload,
Title = HistoryCategorySystem.FilesUpload.ToLocalizedString(),
AdditionalParams = "event_category_attach_file.png"
},
new ListItem
{
ID =(int)HistoryCategorySystem.MailMessage,
Title = HistoryCategorySystem.MailMessage.ToLocalizedString(),
AdditionalParams = "event_category_email.png"
}
};
}
public virtual ListItem GetByID(int id)
{
if (id < 0) return GetSystemListItem(id);
var result = Db.ExecuteList(GetListItemSqlQuery(Exp.Eq("id", id))).ConvertAll(ToListItem);
return result.Count > 0 ? result[0] : null;
}
public virtual List<ListItem> GetItems(int[] id)
{
var sqlResult = Db.ExecuteList(GetListItemSqlQuery(Exp.In("id", id))).ConvertAll(ToListItem);
var systemItem = id.Where(item => item < 0).Select(GetSystemListItem);
return systemItem.Any() ? sqlResult.Union(systemItem).ToList() : sqlResult;
}
public virtual List<ListItem> GetAll()
{
return Db.ExecuteList(GetListItemSqlQuery(null)).ConvertAll(ToListItem);
}
public virtual void ChangeColor(int id, string newColor)
{
Db.ExecuteNonQuery(Update("crm_list_item")
.Set("color", newColor)
.Where(Exp.Eq("id", id)));
}
public NameValueCollection GetColors(ListType listType)
{
var where = Exp.Eq("list_type", (int)listType);
var result = new NameValueCollection();
Db.ExecuteList(Query("crm_list_item")
.Select("id", "color")
.Where(where))
.ForEach(row => result.Add(row[0].ToString(), row[1].ToString()));
return result;
}
public ListItem GetByTitle(ListType listType, string title)
{
var result = Db.ExecuteList(GetListItemSqlQuery(Exp.Eq("title", title) & Exp.Eq("list_type", (int)listType))).ConvertAll(ToListItem);
return result.Count > 0 ? result[0] : null;
}
public int GetRelativeItemsCount(ListType listType, int id)
{
SqlQuery sqlQuery;
switch (listType)
{
case ListType.ContactStatus:
sqlQuery = Query("crm_contact")
.Select("count(*)")
.Where(Exp.Eq("status_id", id));
break;
case ListType.ContactType:
sqlQuery = Query("crm_contact")
.Select("count(*)")
.Where(Exp.Eq("contact_type_id", id));
break;
case ListType.TaskCategory:
sqlQuery = Query("crm_task")
.Select("count(*)")
.Where(Exp.Eq("category_id", id));
break;
case ListType.HistoryCategory:
sqlQuery = Query("crm_relationship_event")
.Select("count(*)")
.Where(Exp.Eq("category_id", id));
break;
default:
throw new ArgumentException();
}
return Db.ExecuteScalar<int>(sqlQuery);
}
public Dictionary<int, int> GetRelativeItemsCount(ListType listType)
{
var sqlQuery = Query("crm_list_item tbl_list_item")
.Where(Exp.Eq("tbl_list_item.list_type", (int)listType))
.Select("tbl_list_item.id")
.OrderBy("tbl_list_item.sort_order", true)
.GroupBy("tbl_list_item.id");
switch (listType)
{
case ListType.ContactStatus:
sqlQuery.LeftOuterJoin("crm_contact tbl_crm_contact",
Exp.EqColumns("tbl_list_item.id", "tbl_crm_contact.status_id")
& Exp.EqColumns("tbl_list_item.tenant_id", "tbl_crm_contact.tenant_id"))
.Select("count(tbl_crm_contact.status_id)");
break;
case ListType.ContactType:
sqlQuery.LeftOuterJoin("crm_contact tbl_crm_contact",
Exp.EqColumns("tbl_list_item.id", "tbl_crm_contact.contact_type_id")
& Exp.EqColumns("tbl_list_item.tenant_id", "tbl_crm_contact.tenant_id"))
.Select("count(tbl_crm_contact.contact_type_id)");
break;
case ListType.TaskCategory:
sqlQuery.LeftOuterJoin("crm_task tbl_crm_task",
Exp.EqColumns("tbl_list_item.id", "tbl_crm_task.category_id")
& Exp.EqColumns("tbl_list_item.tenant_id", "tbl_crm_task.tenant_id"))
.Select("count(tbl_crm_task.category_id)");
break;
case ListType.HistoryCategory:
sqlQuery.LeftOuterJoin("crm_relationship_event tbl_crm_relationship_event",
Exp.EqColumns("tbl_list_item.id", "tbl_crm_relationship_event.category_id")
& Exp.EqColumns("tbl_list_item.tenant_id", "tbl_crm_relationship_event.tenant_id"))
.Select("count(tbl_crm_relationship_event.category_id)");
break;
default:
throw new ArgumentException();
}
var queryResult = Db.ExecuteList(sqlQuery);
return queryResult.ToDictionary(x => Convert.ToInt32(x[0]), y => Convert.ToInt32(y[1]));
}
public virtual int CreateItem(ListType listType, ListItem enumItem)
{
if (IsExist(listType, enumItem.Title))
return GetByTitle(listType, enumItem.Title).ID;
if (string.IsNullOrEmpty(enumItem.Title))
throw new ArgumentException();
if (listType == ListType.TaskCategory || listType == ListType.HistoryCategory)
{
if (string.IsNullOrEmpty(enumItem.AdditionalParams))
throw new ArgumentException();
else
enumItem.AdditionalParams = System.IO.Path.GetFileName(enumItem.AdditionalParams);
}
if (listType == ListType.ContactStatus)
{
if (string.IsNullOrEmpty(enumItem.Color))
throw new ArgumentException();
}
var sortOrder = enumItem.SortOrder;
if (sortOrder == 0)
sortOrder = Db.ExecuteScalar<int>(Query("crm_list_item")
.Where(Exp.Eq("list_type", (int)listType))
.SelectMax("sort_order")) + 1;
return Db.ExecuteScalar<int>(
Insert("crm_list_item")
.InColumnValue("id", 0)
.InColumnValue("list_type", (int)listType)
.InColumnValue("description", enumItem.Description)
.InColumnValue("title", enumItem.Title)
.InColumnValue("additional_params", enumItem.AdditionalParams)
.InColumnValue("color", enumItem.Color)
.InColumnValue("sort_order", sortOrder)
.Identity(1, 0, true));
}
public virtual void EditItem(ListType listType, ListItem enumItem)
{
if (HaveRelativeItemsLink(listType, enumItem.ID))
switch (listType)
{
case ListType.ContactStatus:
case ListType.ContactType:
throw new ArgumentException(string.Format("{0}. {1}.", CRMErrorsResource.BasicCannotBeEdited, CRMErrorsResource.HasRelatedContacts));
case ListType.TaskCategory:
throw new ArgumentException(string.Format("{0}. {1}.", CRMErrorsResource.BasicCannotBeEdited, CRMErrorsResource.TaskCategoryHasRelatedTasks));
case ListType.HistoryCategory:
throw new ArgumentException(string.Format("{0}. {1}.", CRMErrorsResource.BasicCannotBeEdited, CRMErrorsResource.HistoryCategoryHasRelatedEvents));
default:
throw new ArgumentException(string.Format("{0}.", CRMErrorsResource.BasicCannotBeEdited));
}
Db.ExecuteNonQuery(Update("crm_list_item")
.Set("description", enumItem.Description)
.Set("title", enumItem.Title)
.Set("additional_params", enumItem.AdditionalParams)
.Set("color", enumItem.Color)
.Where(Exp.Eq("id", enumItem.ID)));
}
public virtual void ChangePicture(int id, String newPicture)
{
Db.ExecuteNonQuery(Update("crm_list_item")
.Set("additional_params", newPicture)
.Where(Exp.Eq("id", id)));
}
private bool HaveRelativeItemsLink(ListType listType, int itemID)
{
SqlQuery sqlQuery;
switch (listType)
{
case ListType.ContactStatus:
sqlQuery = Query("crm_contact")
.Where(Exp.Eq("status_id", itemID));
break;
case ListType.ContactType:
sqlQuery = Query("crm_contact")
.Where(Exp.Eq("contact_type_id", itemID));
break;
case ListType.TaskCategory:
sqlQuery = Query("crm_task")
.Where(Exp.Eq("category_id", itemID));
break;
case ListType.HistoryCategory:
sqlQuery = Query("crm_relationship_event")
.Where(Exp.Eq("category_id", itemID));
break;
default:
throw new ArgumentException();
}
return Db.ExecuteScalar<int>(sqlQuery.SelectCount()) > 0;
}
public void ChangeRelativeItemsLink(ListType listType, int fromItemID, int toItemID)
{
if (!IsExist(fromItemID))
throw new ArgumentException("The source item does not exist.", "fromItemID");
if (!HaveRelativeItemsLink(listType, fromItemID)) return;
if (!IsExist(toItemID))
throw new ArgumentException("The destination item does not exist.", "toItemID");
SqlUpdate sqlUpdate;
switch (listType)
{
case ListType.ContactStatus:
sqlUpdate = Update("crm_contact")
.Set("status_id", toItemID)
.Where(Exp.Eq("status_id", fromItemID));
break;
case ListType.ContactType:
sqlUpdate = Update("crm_contact")
.Set("contact_type_id", toItemID)
.Where(Exp.Eq("contact_type_id", fromItemID));
break;
case ListType.TaskCategory:
sqlUpdate = Update("crm_task")
.Set("category_id", toItemID)
.Where(Exp.Eq("category_id", fromItemID));
break;
case ListType.HistoryCategory:
sqlUpdate = Update("crm_relationship_event")
.Set("category_id", toItemID)
.Where(Exp.Eq("category_id", fromItemID));
break;
default:
throw new ArgumentException();
}
Db.ExecuteNonQuery(sqlUpdate);
}
public virtual void DeleteItem(ListType listType, int itemID, int toItemID)
{
if (HaveRelativeItemsLink(listType, itemID))
{
switch (listType)
{
case ListType.ContactStatus:
case ListType.ContactType:
throw new ArgumentException(string.Format("{0}. {1}.", CRMErrorsResource.BasicCannotBeDeleted, CRMErrorsResource.HasRelatedContacts));
case ListType.TaskCategory:
var exMsg = string.Format("{0}. {1}.", CRMErrorsResource.BasicCannotBeDeleted, CRMErrorsResource.TaskCategoryHasRelatedTasks);
if (itemID == toItemID) throw new ArgumentException(exMsg);
ChangeRelativeItemsLink(listType, itemID, toItemID);
break;
case ListType.HistoryCategory:
throw new ArgumentException(string.Format("{0}. {1}.", CRMErrorsResource.BasicCannotBeDeleted, CRMErrorsResource.HistoryCategoryHasRelatedEvents));
default:
throw new ArgumentException(string.Format("{0}.", CRMErrorsResource.BasicCannotBeDeleted));
}
}
Db.ExecuteNonQuery(Delete("crm_list_item").Where(Exp.Eq("id", itemID) & Exp.Eq("list_type", (int)listType)));
}
public virtual void ReorderItems(ListType listType, String[] titles)
{
using (var tx = Db.BeginTransaction())
{
for (int index = 0; index < titles.Length; index++)
Db.ExecuteNonQuery(Update("crm_list_item")
.Set("sort_order", index)
.Where(Exp.Eq("title", titles[index]) & Exp.Eq("list_type", (int)listType)));
tx.Commit();
}
}
private SqlQuery GetListItemSqlQuery(Exp where)
{
var result = Query("crm_list_item")
.Select(
"id",
"title",
"description",
"color",
"sort_order",
"additional_params",
"list_type"
);
if (where != null)
result.Where(where);
return result;
}
public static ListItem ToListItem(object[] row)
{
var result = new ListItem
{
ID = Convert.ToInt32(row[0]),
Title = Convert.ToString(row[1]),
Description = Convert.ToString(row[2]),
Color = Convert.ToString(row[3]),
SortOrder = Convert.ToInt32(row[4]),
AdditionalParams = Convert.ToString(row[5])
};
ListType listType;
if (Enum.TryParse(Convert.ToString(row[6]), out listType))
{
result.ListType = listType;
}
return result;
}
}
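// Usage sketch added for illustration; it is not part of the original file. The tenant id,
// title and icon file name below are made-up values, and the sketch assumes the surrounding
// CRM data layer (Db, TenantID) is already configured for the calling context.
internal static class ListItemDaoUsageSketch
{
    internal static void CreateAndReadTaskCategory()
    {
        var dao = new CachedListItem(/* hypothetical tenant id */ 1);

        // Task and history categories require a non-empty icon file name (AdditionalParams).
        var id = dao.CreateItem(ListType.TaskCategory, new ListItem
        {
            Title = "Follow-up call",
            AdditionalParams = "task_category_call.png",
        });

        // Reads go through the per-request cache provided by CachedListItem.
        var item = dao.GetByID(id);

        // Changing the picture resets the cached entry before delegating to the base DAO.
        dao.ChangePicture(item.ID, "task_category_meeting.png");
    }
}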
}
| |
#region BSD License
/*
Copyright (c) 2004-2005 Matthew Holmes ([email protected]), Dan Moorehead ([email protected])
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the
distribution.
* The name of the author may not be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Xml;
using Prebuild.Core.Attributes;
using Prebuild.Core.Interfaces;
using System.IO;
namespace Prebuild.Core.Nodes
{
/// <summary>
/// Holds the set of files (and match patterns) that make up a project definition.
/// </summary>
[DataNode("Files")]
public class FilesNode : DataNode
{
#region Fields
private readonly List<string> m_Files = new List<string>();
private readonly Dictionary<string,BuildAction> m_BuildActions = new Dictionary<string, BuildAction>();
private readonly Dictionary<string, SubType> m_SubTypes = new Dictionary<string, SubType>();
private readonly Dictionary<string, string> m_ResourceNames = new Dictionary<string, string>();
private readonly Dictionary<string, CopyToOutput> m_CopyToOutputs = new Dictionary<string, CopyToOutput>();
private readonly Dictionary<string, bool> m_Links = new Dictionary<string, bool>();
private readonly Dictionary<string, string> m_LinkPaths = new Dictionary<string, string>();
private readonly Dictionary<string, bool> m_PreservePaths = new Dictionary<string, bool>();
#endregion
#region Properties
public int Count
{
get
{
return m_Files.Count;
}
}
#endregion
#region Public Methods
public BuildAction GetBuildAction(string file)
{
if(!m_BuildActions.ContainsKey(file))
{
return BuildAction.Compile;
}
return m_BuildActions[file];
}
public CopyToOutput GetCopyToOutput(string file)
{
if (!m_CopyToOutputs.ContainsKey(file))
{
return CopyToOutput.Never;
}
return m_CopyToOutputs[file];
}
public bool GetIsLink(string file)
{
if (!m_Links.ContainsKey(file))
{
return false;
}
return m_Links[file];
}
public bool Contains(string file)
{
return m_Files.Contains(file);
}
public string GetLinkPath( string file )
{
if ( !m_LinkPaths.ContainsKey( file ) )
{
return string.Empty;
}
return m_LinkPaths[ file ];
}
public SubType GetSubType(string file)
{
if(!m_SubTypes.ContainsKey(file))
{
return SubType.Code;
}
return m_SubTypes[file];
}
public string GetResourceName(string file)
{
if(!m_ResourceNames.ContainsKey(file))
{
return string.Empty;
}
return m_ResourceNames[file];
}
public bool GetPreservePath( string file )
{
if ( !m_PreservePaths.ContainsKey( file ) )
{
return false;
}
return m_PreservePaths[ file ];
}
public override void Parse(XmlNode node)
{
if( node == null )
{
throw new ArgumentNullException("node");
}
foreach(XmlNode child in node.ChildNodes)
{
IDataNode dataNode = Kernel.Instance.ParseNode(child, this);
if(dataNode is FileNode)
{
FileNode fileNode = (FileNode)dataNode;
if(fileNode.IsValid)
{
if (!m_Files.Contains(fileNode.Path))
{
m_Files.Add(fileNode.Path);
m_BuildActions[fileNode.Path] = fileNode.BuildAction;
m_SubTypes[fileNode.Path] = fileNode.SubType;
m_ResourceNames[fileNode.Path] = fileNode.ResourceName;
m_PreservePaths[ fileNode.Path ] = fileNode.PreservePath;
m_Links[ fileNode.Path ] = fileNode.IsLink;
m_LinkPaths[ fileNode.Path ] = fileNode.LinkPath;
m_CopyToOutputs[ fileNode.Path ] = fileNode.CopyToOutput;
}
}
}
else if(dataNode is MatchNode)
{
MatchNode matchNode = (MatchNode)dataNode;
foreach(string file in matchNode.Files)
{
if (!m_Files.Contains(file))
{
m_Files.Add(file);
if (matchNode.BuildAction == null)
m_BuildActions[file] = GetBuildActionByFileName(file);
else
m_BuildActions[file] = matchNode.BuildAction.Value;
m_SubTypes[file] = matchNode.SubType == null ? GetSubTypeByFileName(file) : matchNode.SubType.Value;
m_ResourceNames[ file ] = matchNode.ResourceName;
m_PreservePaths[ file ] = matchNode.PreservePath;
m_Links[ file ] = matchNode.IsLink;
m_LinkPaths[ file ] = matchNode.LinkPath;
m_CopyToOutputs[ file ] = matchNode.CopyToOutput;
}
}
}
}
}
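// Illustrative note (not in the original source): Parse above consumes a <Files> element whose
// children are handled by FileNode and MatchNode, so under that assumption the input looks
// roughly like the following; attribute and element names are indicative only, the
// authoritative set lives in FileNode and MatchNode.
//
//   <Files>
//     <File buildAction="Compile">AssemblyInfo.cs</File>
//     <Match path="src" pattern="*.cs" recurse="true" />
//   </Files>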
// TODO: Check into why StringCollection's enumerator doesn't implement
// IEnumerator?
public IEnumerator<string> GetEnumerator()
{
return m_Files.GetEnumerator();
}
#endregion
}
}
| |
namespace Cronofy
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Cronofy.Requests;
using Cronofy.Responses;
/// <summary>
/// Class for a Cronofy client that interacts with an account's calendars
/// and events.
/// </summary>
public sealed class CronofyAccountClient : CronofyAccessTokenClient, ICronofyAccountClient
{
/// <summary>
/// Initializes a new instance of the
/// <see cref="Cronofy.CronofyAccountClient"/> class.
/// </summary>
/// <param name="accessToken">
/// The access token for the OAuth authorization for the account, must
/// not be empty.
/// </param>
/// <exception cref="System.ArgumentException">
/// Thrown if <paramref name="accessToken"/> is null or empty.
/// </exception>
public CronofyAccountClient(string accessToken)
: base(accessToken)
{
}
/// <summary>
/// Initializes a new instance of the
/// <see cref="Cronofy.CronofyAccountClient"/> class.
/// </summary>
/// <param name="accessToken">
/// The access token for the OAuth authorization for the account, must
/// not be empty.
/// </param>
/// <param name="dataCenter">
/// The data center to use.
/// </param>
/// <exception cref="System.ArgumentException">
/// Thrown if <paramref name="accessToken"/> is <c>null</c> or
/// empty.
/// </exception>
public CronofyAccountClient(string accessToken, string dataCenter)
: base(accessToken, dataCenter)
{
}
/// <inheritdoc/>
public Account GetAccount()
{
var request = new HttpRequest
{
Method = "GET",
Url = this.UrlProvider.AccountUrl,
};
request.AddOAuthAuthorization(this.AccessToken);
var response = this.HttpClient.GetJsonResponse<AccountResponse>(request);
return response.ToAccount();
}
/// <inheritdoc/>
public IEnumerable<Profile> GetProfiles()
{
var request = new HttpRequest();
request.Method = "GET";
request.Url = this.UrlProvider.ProfilesUrl;
request.AddOAuthAuthorization(this.AccessToken);
var response = this.HttpClient.GetJsonResponse<ProfilesResponse>(request);
return response.Profiles.Select(p => p.ToProfile());
}
/// <inheritdoc/>
public IEnumerable<Calendar> GetCalendars()
{
var request = new HttpRequest();
request.Method = "GET";
request.Url = this.UrlProvider.CalendarsUrl;
request.AddOAuthAuthorization(this.AccessToken);
var calendarsResponse = this.HttpClient.GetJsonResponse<CalendarsResponse>(request);
return calendarsResponse.Calendars.Select(c => c.ToCalendar());
}
/// <inheritdoc/>
public Calendar CreateCalendar(string profileId, string name)
{
Preconditions.NotEmpty("profileId", profileId);
Preconditions.NotEmpty("name", name);
var calendarRequest = new CreateCalendarRequest
{
ProfileId = profileId,
Name = name,
};
return this.CreateCalendar(calendarRequest);
}
/// <inheritdoc/>
public Calendar CreateCalendar(string profileId, string name, string color)
{
Preconditions.NotEmpty("profileId", profileId);
Preconditions.NotEmpty("name", name);
Preconditions.NotEmpty("color", color);
var calendarRequest = new CreateCalendarRequest
{
ProfileId = profileId,
Name = name,
Color = color,
};
return this.CreateCalendar(calendarRequest);
}
/// <inheritdoc/>
public IEnumerable<Event> GetEvents()
{
var builder = new GetEventsRequestBuilder();
return this.GetEvents(builder);
}
/// <inheritdoc/>
public IEnumerable<Event> GetEvents(IBuilder<GetEventsRequest> builder)
{
Preconditions.NotNull("builder", builder);
var request = builder.Build();
return this.GetEvents(request);
}
/// <inheritdoc/>
public IEnumerable<Event> GetEvents(GetEventsRequest request)
{
Preconditions.NotNull("request", request);
var httpRequest = new HttpRequest();
httpRequest.Method = "GET";
httpRequest.Url = this.UrlProvider.EventsUrl;
httpRequest.AddOAuthAuthorization(this.AccessToken);
httpRequest.QueryString.Add("tzid", request.TimeZoneId);
httpRequest.QueryString.Add("localized_times", true);
httpRequest.QueryString.Add("from", request.From);
httpRequest.QueryString.Add("to", request.To);
httpRequest.QueryString.Add("last_modified", request.LastModified);
httpRequest.QueryString.Add("include_deleted", request.IncludeDeleted);
httpRequest.QueryString.Add("include_moved", request.IncludeMoved);
httpRequest.QueryString.Add("include_managed", request.IncludeManaged);
httpRequest.QueryString.Add("only_managed", request.OnlyManaged);
httpRequest.QueryString.Add("include_geo", request.IncludeGeo);
httpRequest.QueryString.Add("google_event_ids", request.GoogleEventIds);
httpRequest.QueryString.Add("calendar_ids[]", request.CalendarIds);
return new PagedResultsIterator<ReadEventsResponse, Event>(
this.HttpClient,
this.AccessToken,
httpRequest);
}
/// <inheritdoc/>
public IEnumerable<FreeBusy> GetFreeBusy()
{
var builder = new GetFreeBusyRequestBuilder();
return this.GetFreeBusy(builder);
}
/// <inheritdoc/>
public IEnumerable<FreeBusy> GetFreeBusy(IBuilder<GetFreeBusyRequest> builder)
{
Preconditions.NotNull("builder", builder);
var request = builder.Build();
return this.GetFreeBusy(request);
}
/// <inheritdoc/>
public IEnumerable<FreeBusy> GetFreeBusy(GetFreeBusyRequest request)
{
Preconditions.NotNull("request", request);
var httpRequest = new HttpRequest();
httpRequest.Method = "GET";
httpRequest.Url = this.UrlProvider.FreeBusyUrl;
httpRequest.AddOAuthAuthorization(this.AccessToken);
httpRequest.QueryString.Add("tzid", request.TimeZoneId);
httpRequest.QueryString.Add("localized_times", true);
httpRequest.QueryString.Add("from", request.From);
httpRequest.QueryString.Add("to", request.To);
httpRequest.QueryString.Add("include_managed", request.IncludeManaged);
httpRequest.QueryString.Add("calendar_ids[]", request.CalendarIds);
return new PagedResultsIterator<FreeBusyResponse, FreeBusy>(
this.HttpClient,
this.AccessToken,
httpRequest);
}
/// <inheritdoc/>
public BatchResponse BatchRequest(IBuilder<BatchRequest> batchBuilder)
{
Preconditions.NotNull("batchBuilder", batchBuilder);
var request = batchBuilder.Build();
return this.BatchRequest(request);
}
/// <inheritdoc/>
public BatchResponse BatchRequest(BatchRequest batchRequest)
{
var request = new HttpRequest();
request.Method = "POST";
request.Url = this.UrlProvider.BatchUrl;
request.AddOAuthAuthorization(this.AccessToken);
request.SetJsonBody(batchRequest);
var response = this.HttpClient.GetJsonResponse<BatchResponse>(request);
for (int i = 0; i < response.Batch.Length; i++)
{
response.Batch[i].Request = batchRequest.Batch[i];
}
if (response.HasErrors)
{
var message = string.Format("Batch contains {0} errors", response.Errors.Count);
throw new BatchWithErrorsException(message, response);
}
return response;
}
/// <inheritdoc/>
public void UpsertEvent(string calendarId, IBuilder<UpsertEventRequest> eventBuilder)
{
Preconditions.NotEmpty("calendarId", calendarId);
Preconditions.NotNull("eventBuilder", eventBuilder);
var request = eventBuilder.Build();
this.UpsertEvent(calendarId, request);
}
/// <inheritdoc/>
public void UpsertEvent(string calendarId, UpsertEventRequest eventRequest)
{
Preconditions.NotEmpty("calendarId", calendarId);
Preconditions.NotNull("eventRequest", eventRequest);
var request = new HttpRequest();
request.Method = "POST";
request.Url = string.Format(this.UrlProvider.ManagedEventUrlFormat, calendarId);
request.AddOAuthAuthorization(this.AccessToken);
request.SetJsonBody(eventRequest);
this.HttpClient.GetValidResponse(request);
}
/// <inheritdoc/>
public void DeleteEvent(string calendarId, string eventId)
{
Preconditions.NotEmpty("calendarId", calendarId);
Preconditions.NotEmpty("eventId", eventId);
var request = new HttpRequest();
request.Method = "DELETE";
request.Url = string.Format(this.UrlProvider.ManagedEventUrlFormat, calendarId);
request.AddOAuthAuthorization(this.AccessToken);
var requestBody = new DeleteEventRequest { EventId = eventId };
request.SetJsonBody(requestBody);
this.HttpClient.GetValidResponse(request);
}
/// <inheritdoc/>
public void DeleteAllEvents()
{
var request = new HttpRequest();
request.Method = "DELETE";
request.Url = this.UrlProvider.EventsUrl;
request.AddOAuthAuthorization(this.AccessToken);
var requestBody = new { delete_all = true };
request.SetJsonBody(requestBody);
var response = this.HttpClient.GetResponse(request);
if (response.Code != 202)
{
// TODO More useful exceptions for validation errors
throw new CronofyException("Request failed");
}
}
/// <inheritdoc/>
public void DeleteAllEventsForCalendars(params string[] calendarIds)
{
Preconditions.NotEmpty("calendarIds", calendarIds);
var request = new HttpRequest();
request.Method = "DELETE";
request.Url = this.UrlProvider.EventsUrl;
request.AddOAuthAuthorization(this.AccessToken);
var requestBody = new { calendar_ids = calendarIds };
request.SetJsonBody(requestBody);
var response = this.HttpClient.GetResponse(request);
if (response.Code != 202)
{
// TODO More useful exceptions for validation errors
throw new CronofyException("Request failed");
}
}
/// <inheritdoc/>
public void DeleteExternalEvent(string calendarId, string eventUid)
{
Preconditions.NotEmpty("calendarId", calendarId);
Preconditions.NotEmpty("eventUid", eventUid);
var request = new HttpRequest();
request.Method = "DELETE";
request.Url = string.Format(this.UrlProvider.ManagedEventUrlFormat, calendarId);
request.AddOAuthAuthorization(this.AccessToken);
var requestBody = new DeleteExternalEventRequest { EventUid = eventUid };
request.SetJsonBody(requestBody);
var response = this.HttpClient.GetResponse(request);
if (response.Code != 202)
{
throw new CronofyException("Request failed");
}
}
/// <inheritdoc/>
public void ChangeParticipationStatus(string calendarId, string eventUid, ParticipationStatus status)
{
Preconditions.NotEmpty("calendarId", calendarId);
Preconditions.NotEmpty("eventUid", eventUid);
var request = new HttpRequest();
request.Method = "POST";
request.Url = string.Format(this.UrlProvider.ParticipationStatusUrlFormat, calendarId, eventUid);
request.AddOAuthAuthorization(this.AccessToken);
var requestBody = new { status = status.ToString().ToLower() };
request.SetJsonBody(requestBody);
var response = this.HttpClient.GetResponse(request);
if (response.Code != 202)
{
throw new CronofyException("Request failed");
}
}
/// <inheritdoc/>
public ElevatedPermissionsResponse ElevatedPermissions(IBuilder<ElevatedPermissionsRequest> builder)
{
Preconditions.NotNull("builder", builder);
var request = builder.Build();
return this.ElevatedPermissions(request);
}
/// <inheritdoc/>
public ElevatedPermissionsResponse ElevatedPermissions(ElevatedPermissionsRequest permissionsRequest)
{
Preconditions.NotNull("permissionsRequesr", permissionsRequest);
var request = new HttpRequest();
request.Method = "POST";
request.Url = this.UrlProvider.PermissionsUrl;
request.AddOAuthAuthorization(this.AccessToken);
request.SetJsonBody(permissionsRequest);
var response = this.HttpClient.GetJsonResponse<Responses.ElevatedPermissionsResponse>(request);
return response.ToElevatedPermissions();
}
/// <inheritdoc/>
public Channel CreateChannel(string callbackUrl)
{
Preconditions.NotEmpty("callbackUrl", callbackUrl);
var builder = new CreateChannelBuilder()
.CallbackUrl(callbackUrl);
return this.CreateChannel(builder);
}
/// <inheritdoc/>
public Channel CreateChannel(IBuilder<CreateChannelRequest> channelBuilder)
{
Preconditions.NotNull("channelBuilder", channelBuilder);
var request = channelBuilder.Build();
return this.CreateChannel(request);
}
/// <inheritdoc/>
public Channel CreateChannel(CreateChannelRequest channelRequest)
{
Preconditions.NotNull("channelRequest", channelRequest);
var request = new HttpRequest();
request.Method = "POST";
request.Url = this.UrlProvider.ChannelsUrl;
request.AddOAuthAuthorization(this.AccessToken);
request.SetJsonBody(channelRequest);
var response = this.HttpClient.GetJsonResponse<ChannelResponse>(request);
return response.ToChannel();
}
/// <inheritdoc/>
public IEnumerable<Channel> GetChannels()
{
var request = new HttpRequest();
request.Method = "GET";
request.Url = this.UrlProvider.ChannelsUrl;
request.AddOAuthAuthorization(this.AccessToken);
var response = this.HttpClient.GetJsonResponse<ChannelsResponse>(request);
return response.Channels.Select(c => c.ToChannel());
}
/// <inheritdoc/>
public void CloseChannel(string channelId)
{
Preconditions.NotEmpty("channelId", channelId);
var request = new HttpRequest();
request.Method = "DELETE";
request.Url = string.Format(this.UrlProvider.ChannelUrlFormat, channelId);
request.AddOAuthAuthorization(this.AccessToken);
this.HttpClient.GetValidResponse(request);
}
/// <inheritdoc/>
public IEnumerable<AvailablePeriod> GetAvailability(IBuilder<AvailabilityRequest> builder)
{
Preconditions.NotNull("builder", builder);
var request = builder.Build();
return this.GetAvailability(request);
}
/// <inheritdoc/>
public IEnumerable<AvailablePeriod> GetAvailability(AvailabilityRequest availabilityRequest)
{
Preconditions.NotNull("availabilityRequest", availabilityRequest);
var request = new HttpRequest();
request.Method = "POST";
request.Url = this.UrlProvider.AvailabilityUrl;
request.AddOAuthAuthorization(this.AccessToken);
request.SetJsonBody(availabilityRequest);
var response = this.HttpClient.GetJsonResponse<AvailabilityResponse>(request);
return response.AvailablePeriods.Select(ap => ap.ToAvailablePeriod());
}
/// <inheritdoc/>
public AvailableSequences GetSequencedAvailability(IBuilder<SequencedAvailabilityRequest> builder)
{
Preconditions.NotNull("builder", builder);
var request = builder.Build();
return this.GetSequencedAvailability(request);
}
/// <inheritdoc/>
public AvailableSequences GetSequencedAvailability(SequencedAvailabilityRequest availabilityRequest)
{
Preconditions.NotNull("availabilityRequest", availabilityRequest);
var request = new HttpRequest();
request.Method = "POST";
request.Url = this.UrlProvider.SequencedAvailabilityUrl;
request.AddOAuthAuthorization(this.AccessToken);
request.SetJsonBody(availabilityRequest);
var response = this.HttpClient.GetJsonResponse<SequencedAvailabilityResponse>(request);
return response.ToSequence();
}
/// <inheritdoc/>
public string CreateLinkToken()
{
var request = new HttpRequest();
request.Method = "POST";
request.Url = this.UrlProvider.LinkTokensUrl;
request.AddOAuthAuthorization(this.AccessToken);
var response = this.HttpClient.GetJsonResponse<LinkTokenResponse>(request);
return response.LinkToken;
}
/// <inheritdoc/>
public void RevokeProfileAuthorization(string profileId)
{
Preconditions.NotEmpty("profileId", profileId);
var request = new HttpRequest();
request.Method = "POST";
request.Url = string.Format(this.UrlProvider.RevokeProfileAuthorizationUrlFormat, profileId);
request.AddOAuthAuthorization(this.AccessToken);
this.HttpClient.GetValidResponse(request);
}
/// <inheritdoc/>
public string GetConferencingServiceAuthorizationUrl(ConferencingServiceAuthorizationRequest conferencingServiceAuthorizationRequest)
{
Preconditions.NotNull(nameof(conferencingServiceAuthorizationRequest), conferencingServiceAuthorizationRequest);
Preconditions.NotBlank(nameof(conferencingServiceAuthorizationRequest.RedirectUri), conferencingServiceAuthorizationRequest.RedirectUri);
var request = new HttpRequest
{
Method = "POST",
Url = this.UrlProvider.ConferencingServiceAuthorizationUrl,
};
request.AddOAuthAuthorization(this.AccessToken);
request.SetJsonBody(conferencingServiceAuthorizationRequest);
var response = this.HttpClient.GetJsonResponse<ConferencingServiceAuthorizationResponse>(request);
return response.AuthorizationRequest.Url;
}
/// <summary>
/// Creates a calendar.
/// </summary>
/// <param name="calendarRequest">
/// The calendar request from which to make the calendar.
/// </param>
/// <returns>The created calendar.</returns>
private Calendar CreateCalendar(CreateCalendarRequest calendarRequest)
{
var request = new HttpRequest();
request.Method = "POST";
request.Url = this.UrlProvider.CalendarsUrl;
request.AddOAuthAuthorization(this.AccessToken);
request.SetJsonBody(calendarRequest);
var response = this.HttpClient.GetJsonResponse<CreateCalendarResponse>(request);
return response.ToCalendar();
}
/// <summary>
/// Iterator for a paged events response.
/// </summary>
/// <typeparam name="TResponse">
/// The type of response returned by the paged result set.
/// </typeparam>
/// <typeparam name="TResult">
/// The type of the items within the paged result set.
/// </typeparam>
internal sealed class PagedResultsIterator<TResponse, TResult> : IEnumerable<TResult>
where TResponse : IPagedResultsResponse<TResult>
{
/// <summary>
/// The HTTP client to perform requests with.
/// </summary>
private readonly IHttpClient httpClient;
/// <summary>
/// The access token for the OAuth authorization for the account.
/// </summary>
private readonly string accessToken;
/// <summary>
/// The first page of the events response.
/// </summary>
private readonly TResponse firstPage;
/// <summary>
/// Initializes a new instance of the
/// <see cref="Cronofy.CronofyAccountClient.PagedResultsIterator{TResponse,TResult}"/>
/// class.
/// </summary>
/// <param name="httpClient">
/// The HTTP client to use for requests, must not be null.
/// </param>
/// <param name="accessToken">
/// The access token for the OAuth authorization for the account,
/// must not be empty.
/// </param>
/// <param name="firstRequest">
/// The request for the first page of results, must not be null.
/// </param>
/// <exception cref="System.ArgumentException">
/// Thrown if <paramref name="httpClient"/> or
/// <paramref name="firstRequest"/> are null, of if
/// <paramref name="accessToken"/> is empty.
/// </exception>
public PagedResultsIterator(IHttpClient httpClient, string accessToken, HttpRequest firstRequest)
{
Preconditions.NotNull("httpClient", httpClient);
Preconditions.NotEmpty("accessToken", accessToken);
Preconditions.NotNull("firstRequest", firstRequest);
this.httpClient = httpClient;
this.accessToken = accessToken;
// Eagerly fetch the first page to hit access token and validation issues.
this.firstPage = this.httpClient.GetJsonResponse<TResponse>(firstRequest);
}
/// <inheritdoc/>
public IEnumerator<TResult> GetEnumerator()
{
return this.GetResults().GetEnumerator();
}
/// <inheritdoc/>
IEnumerator IEnumerable.GetEnumerator()
{
return this.GetEnumerator();
}
/// <summary>
/// Gets all the events from the result set.
/// </summary>
/// <returns>
/// All the events from the result set.
/// </returns>
private IEnumerable<TResult> GetResults()
{
return this.GetPages().SelectMany(page => page.GetResults());
}
/// <summary>
/// Gets all the pages from the result set.
/// </summary>
/// <returns>
/// All the pages from the result set.
/// </returns>
private IEnumerable<TResponse> GetPages()
{
var currentPage = this.firstPage;
while (true)
{
yield return currentPage;
if (currentPage.Pages.NextPageUrl == null)
{
break;
}
currentPage = this.GetNextPageResponse(currentPage);
}
}
/// <summary>
/// Gets the next page response.
/// </summary>
/// <param name="currentPage">
/// The response for the current page.
/// </param>
/// <returns>
/// The next page response.
/// </returns>
private TResponse GetNextPageResponse(IPagedResultsResponse<TResult> currentPage)
{
var request = new HttpRequest
{
Method = "GET",
Url = currentPage.Pages.NextPageUrl,
};
request.AddOAuthAuthorization(this.accessToken);
return this.httpClient.GetJsonResponse<TResponse>(request);
}
}
}
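// Usage sketch added for illustration; it is not part of the original file. The access token
// below is a made-up placeholder, and only members defined in this class are used.
internal static class CronofyAccountClientUsageSketch
{
    internal static void EnumerateEvents()
    {
        var client = new CronofyAccountClient("hypothetical-access-token");

        // GetEvents returns a lazily paged sequence; enumerating it walks further pages on
        // demand through PagedResultsIterator.
        foreach (var calendarEvent in client.GetEvents())
        {
            // Process each event here.
        }

        // Calendars and profiles are fetched eagerly as single requests.
        var calendars = client.GetCalendars().ToList();
        var profiles = client.GetProfiles().ToList();
    }
}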
}
| |
// Copyright 2012 Henrik Feldt, Chris Patterson, et. al.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.Transports.AzureServiceBus
{
using System;
using System.Collections.Generic;
using Configuration;
using Configuration.Builders;
using Configuration.Configurators;
using Exceptions;
using Logging;
using Magnum.Caching;
using Util;
/// <summary>
/// Implementation of the transport factory
/// </summary>
public class AzureServiceBusTransportFactory :
ITransportFactory
{
static readonly ILog _log = Logger.Get(typeof(AzureServiceBusTransportFactory));
readonly Cache<Uri, IAzureServiceBusEndpointAddress> _addresses;
readonly Cache<string, IConnectionSettings> _connectionSettings;
readonly Cache<string, ConnectionSettingsBuilder> _connectionSettingsBuilders;
readonly Cache<string, ConnectionHandler<AzureServiceBusConnection>> _connections;
readonly AzureServiceBusMessageNameFormatter _formatter;
readonly IInboundSettings _inboundSettings;
readonly IOutboundSettings _outboundSettings;
bool _disposed;
public AzureServiceBusTransportFactory(IEnumerable<ConnectionSettingsBuilder> builders,
IInboundSettings inboundSettings, IOutboundSettings outboundSettings)
{
_addresses = new ConcurrentCache<Uri, IAzureServiceBusEndpointAddress>();
_connections = new ConcurrentCache<string, ConnectionHandler<AzureServiceBusConnection>>();
_connectionSettings = new ConcurrentCache<string, IConnectionSettings>(StringComparer.InvariantCultureIgnoreCase);
_connectionSettingsBuilders = new ConcurrentCache<string, ConnectionSettingsBuilder>(x => x.Namespace, builders);
_formatter = new AzureServiceBusMessageNameFormatter();
_inboundSettings = inboundSettings;
_outboundSettings = outboundSettings;
_log.Debug("created new transport factory");
}
public IEndpointAddress GetAddress(Uri uri, bool transactional)
{
return _addresses.Get(uri, _ => AzureServiceBusEndpointAddress.Parse(uri, GetConnectionSettings(uri.Host)));
}
/// <summary>
/// Gets the scheme. (af-queues)
/// </summary>
public string Scheme
{
get { return Constants.Scheme; }
}
/// <summary>
/// The message name formatter associated with this transport
/// </summary>
public IMessageNameFormatter MessageNameFormatter
{
get { return _formatter; }
}
/// <summary>
/// Builds the duplex transport.
/// </summary>
/// <param name="settings"> The settings. </param>
/// <returns> </returns>
public IDuplexTransport BuildLoopback([NotNull] ITransportSettings settings)
{
if (settings == null)
throw new ArgumentNullException("settings");
_log.Debug("building duplex transport");
return new Transport(settings.Address, () => BuildInbound(settings), () => BuildOutbound(settings));
}
/// <summary>
/// Builds the inbound transport for the service bus endpoint,
/// </summary>
/// <param name="settings"> using these settings </param>
/// <returns> A non-null instance of the inbound transport. </returns>
public virtual IInboundTransport BuildInbound([NotNull] ITransportSettings settings)
{
if (settings == null)
throw new ArgumentNullException("settings");
Uri uri = settings.Address.Uri;
EnsureProtocolIsCorrect(uri);
IAzureServiceBusEndpointAddress address = _addresses.Get(uri,
key => AzureServiceBusEndpointAddress.Parse(uri, GetConnectionSettings(uri.Host)));
_log.DebugFormat("building inbound transport for address '{0}'", address);
ConnectionHandler<AzureServiceBusConnection> connectionHandler = GetConnection(_connections, address);
return new InboundAzureServiceBusTransport(address, connectionHandler,
MessageNameFormatter, _inboundSettings);
}
/// <summary>
/// Builds the outbound transport
/// </summary>
/// <param name="settings"> with settings </param>
/// <returns> The outbound transport instance, non-null </returns>
public virtual IOutboundTransport BuildOutbound([NotNull] ITransportSettings settings)
{
if (settings == null)
throw new ArgumentNullException("settings");
Uri uri = settings.Address.Uri;
EnsureProtocolIsCorrect(uri);
IAzureServiceBusEndpointAddress address = _addresses.Get(uri,
key => AzureServiceBusEndpointAddress.Parse(uri, GetConnectionSettings(uri.Host)));
_log.DebugFormat("building outbound transport for address '{0}'", address);
ConnectionHandler<AzureServiceBusConnection> connectionHandler = GetConnection(_connections, address);
return new OutboundAzureServiceBusTransport(address, connectionHandler, _outboundSettings);
}
/// <summary>
/// Builds the outbound error transport; where to send messages that fail.
/// </summary>
/// <param name="settings"> using these settings </param>
/// <returns> The outbound transport instance, non null </returns>
public virtual IOutboundTransport BuildError([NotNull] ITransportSettings settings)
{
if (settings == null)
throw new ArgumentNullException("settings");
Uri uri = settings.Address.Uri;
EnsureProtocolIsCorrect(uri);
IAzureServiceBusEndpointAddress address = _addresses.Get(uri,
key => AzureServiceBusEndpointAddress.Parse(uri, GetConnectionSettings(uri.Host)));
_log.DebugFormat("building error transport for address '{0}'", address);
ConnectionHandler<AzureServiceBusConnection> connectionHandler = GetConnection(_connections, address);
return new OutboundAzureServiceBusTransport(address, connectionHandler, _outboundSettings);
}
public void Dispose()
{
if (_disposed)
return;
_connections.Each(x => x.Dispose());
_connections.Clear();
_addresses.Clear();
_disposed = true;
}
IConnectionSettings GetConnectionSettings(string ns)
{
return _connectionSettings.Get(ns, _ =>
{
var builder = _connectionSettingsBuilders.Get(ns, __ =>
{
throw new ArgumentException("Unable to get the settings for " + ns);
});
return builder.Build();
});
}
/// <summary>
/// Ensures the protocol is correct.
/// </summary>
/// <param name="address"> The address. </param>
void EnsureProtocolIsCorrect([NotNull] Uri address)
{
if (address == null)
throw new ArgumentNullException("address");
if (address.Scheme != Scheme)
{
throw new EndpointException(address,
string.Format("Address must start with '{1}' not '{0}'", address.Scheme, Scheme));
}
}
ConnectionHandler<AzureServiceBusConnection> GetConnection(
Cache<string, ConnectionHandler<AzureServiceBusConnection>> cache,
IAzureServiceBusEndpointAddress address)
{
var ns = address.Uri.Host;
return cache.Get(ns, _ =>
{
if (_log.IsDebugEnabled)
_log.DebugFormat("Creating Azure Service Bus connection: {0}", address.Uri);
ConnectionSettingsBuilder builder = _connectionSettingsBuilders.Get(ns, __ =>
{
if (_log.IsDebugEnabled)
_log.DebugFormat("Using default configurator for connection: {0}", address.Uri);
var configurator = new NamespaceConnectionSettingsConfigurator(ns);
return configurator.Configure();
});
IConnectionSettings connectionSettings = builder.Build();
var connection = new AzureServiceBusConnectionImpl(address, connectionSettings.TokenProvider);
return new ConnectionHandlerImpl<AzureServiceBusConnection>(connection);
});
}
}
}
| |
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
using System.Text;
namespace UMA
{
/// <summary>
/// Utility class for merging multiple skinned meshes.
/// </summary>
public static class SkinnedMeshCombiner
{
/// <summary>
/// Container for source mesh data.
/// </summary>
public class CombineInstance
{
public UMAMeshData meshData;
public int[] targetSubmeshIndices;
}
private enum MeshComponents
{
none = 0,
has_normals = 1,
has_tangents = 2,
has_colors32 = 4,
has_uv = 8,
has_uv2 = 16,
has_uv3 = 32,
has_uv4 = 64
}
static Dictionary<int, BoneIndexEntry> bonesCollection;
static List<Matrix4x4> bindPoses;
static List<int> bonesList;
/// <summary>
/// Combines a set of meshes into the target mesh.
/// </summary>
/// <param name="target">Target.</param>
/// <param name="sources">Sources.</param>
public static void CombineMeshes(UMAMeshData target, CombineInstance[] sources)
{
int vertexCount = 0;
int bindPoseCount = 0;
int transformHierarchyCount = 0;
MeshComponents meshComponents = MeshComponents.none;
int subMeshCount = FindTargetSubMeshCount(sources);
var subMeshTriangleLength = new int[subMeshCount];
AnalyzeSources(sources, subMeshTriangleLength, ref vertexCount, ref bindPoseCount, ref transformHierarchyCount, ref meshComponents);
int[][] submeshTriangles = new int[subMeshCount][];
for (int i = 0; i < subMeshTriangleLength.Length; i++)
{
submeshTriangles[i] = new int[subMeshTriangleLength[i]];
subMeshTriangleLength[i] = 0;
}
bool has_normals = (meshComponents & MeshComponents.has_normals) != MeshComponents.none;
bool has_tangents = (meshComponents & MeshComponents.has_tangents) != MeshComponents.none;
bool has_uv = (meshComponents & MeshComponents.has_uv) != MeshComponents.none;
bool has_uv2 = (meshComponents & MeshComponents.has_uv2) != MeshComponents.none;
#if !UNITY_4_6
bool has_uv3 = (meshComponents & MeshComponents.has_uv3) != MeshComponents.none;
bool has_uv4 = (meshComponents & MeshComponents.has_uv4) != MeshComponents.none;
#endif
bool has_colors32 = (meshComponents & MeshComponents.has_colors32) != MeshComponents.none;
Vector3[] vertices = EnsureArrayLength(target.vertices, vertexCount);
BoneWeight[] boneWeights = EnsureArrayLength(target.unityBoneWeights, vertexCount);
Vector3[] normals = has_normals ? EnsureArrayLength(target.normals, vertexCount) : null;
Vector4[] tangents = has_tangents ? EnsureArrayLength(target.tangents, vertexCount) : null;
Vector2[] uv = has_uv ? EnsureArrayLength(target.uv, vertexCount) : null;
Vector2[] uv2 = has_uv2 ? EnsureArrayLength(target.uv2, vertexCount) : null;
#if !UNITY_4_6
Vector2[] uv3 = has_uv3 ? EnsureArrayLength(target.uv3, vertexCount) : null;
Vector2[] uv4 = has_uv4 ? EnsureArrayLength(target.uv4, vertexCount) : null;
#endif
Color32[] colors32 = has_colors32 ? EnsureArrayLength(target.colors32, vertexCount) : null;
UMATransform[] umaTransforms = EnsureArrayLength(target.umaBones, transformHierarchyCount);
int boneCount = 0;
foreach (var source in sources)
{
MergeSortedTransforms(umaTransforms, ref boneCount, source.meshData.umaBones);
}
int vertexIndex = 0;
if (bonesCollection == null)
bonesCollection = new Dictionary<int, BoneIndexEntry>(boneCount);
else
bonesCollection.Clear();
if (bindPoses == null)
bindPoses = new List<Matrix4x4>(bindPoseCount);
else
bindPoses.Clear();
if (bonesList == null)
bonesList = new List<int>(boneCount);
else
bonesList.Clear();
foreach (var source in sources)
{
vertexCount = source.meshData.vertices.Length;
BuildBoneWeights(source.meshData.boneWeights, 0, boneWeights, vertexIndex, vertexCount, source.meshData.boneNameHashes, source.meshData.bindPoses, bonesCollection, bindPoses, bonesList);
Array.Copy(source.meshData.vertices, 0, vertices, vertexIndex, vertexCount);
if (has_normals)
{
if (source.meshData.normals != null && source.meshData.normals.Length > 0)
{
Array.Copy(source.meshData.normals, 0, normals, vertexIndex, vertexCount);
}
else
{
FillArray(normals, vertexIndex, vertexCount, Vector3.zero);
}
}
if (has_tangents)
{
if (source.meshData.tangents != null && source.meshData.tangents.Length > 0)
{
Array.Copy(source.meshData.tangents, 0, tangents, vertexIndex, vertexCount);
}
else
{
FillArray(tangents, vertexIndex, vertexCount, Vector4.zero);
}
}
if (has_uv)
{
if (source.meshData.uv != null && source.meshData.uv.Length >= vertexCount)
{
Array.Copy(source.meshData.uv, 0, uv, vertexIndex, vertexCount);
}
else
{
FillArray(uv, vertexIndex, vertexCount, Vector2.zero);
}
}
if (has_uv2)
{
if (source.meshData.uv2 != null && source.meshData.uv2.Length >= vertexCount)
{
Array.Copy(source.meshData.uv2, 0, uv2, vertexIndex, vertexCount);
}
else
{
FillArray(uv2, vertexIndex, vertexCount, Vector2.zero);
}
}
#if !UNITY_4_6
if (has_uv3)
{
if (source.meshData.uv3 != null && source.meshData.uv3.Length >= vertexCount)
{
Array.Copy(source.meshData.uv3, 0, uv3, vertexIndex, vertexCount);
}
else
{
FillArray(uv3, vertexIndex, vertexCount, Vector2.zero);
}
}
if (has_uv4)
{
if (source.meshData.uv4 != null && source.meshData.uv4.Length >= vertexCount)
{
Array.Copy(source.meshData.uv4, 0, uv4, vertexIndex, vertexCount);
}
else
{
FillArray(uv4, vertexIndex, vertexCount, Vector2.zero);
}
}
#endif
if (has_colors32)
{
if (source.meshData.colors32 != null && source.meshData.colors32.Length > 0)
{
Array.Copy(source.meshData.colors32, 0, colors32, vertexIndex, vertexCount);
}
else
{
Color32 white32 = Color.white;
FillArray(colors32, vertexIndex, vertexCount, white32);
}
}
for (int i = 0; i < source.meshData.subMeshCount; i++)
{
if (source.targetSubmeshIndices[i] >= 0)
{
int[] subTriangles = source.meshData.submeshes[i].triangles;
int triangleLength = subTriangles.Length;
int destMesh = source.targetSubmeshIndices[i];
CopyIntArrayAdd(subTriangles, 0, submeshTriangles[destMesh], subMeshTriangleLength[destMesh], triangleLength, vertexIndex);
subMeshTriangleLength[destMesh] += triangleLength;
}
}
vertexIndex += vertexCount;
}
vertexCount = vertexIndex;
// fill in new values.
target.vertexCount = vertexCount;
target.vertices = vertices;
target.unityBoneWeights = boneWeights;
target.bindPoses = bindPoses.ToArray();
target.normals = normals;
target.tangents = tangents;
target.uv = uv;
target.uv2 = uv2;
#if !UNITY_4_6
target.uv3 = uv3;
target.uv4 = uv4;
#endif
target.colors32 = colors32;
target.subMeshCount = subMeshCount;
target.submeshes = new SubMeshTriangles[subMeshCount];
target.umaBones = umaTransforms;
target.umaBoneCount = boneCount;
for (int i = 0; i < subMeshCount; i++)
{
target.submeshes[i].triangles = submeshTriangles[i];
}
target.boneNameHashes = bonesList.ToArray();
}
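/// <summary>
/// Merges the hash-sorted <paramref name="umaTransforms"/> into the first <paramref name="len1"/>
/// entries of <paramref name="mergedTransforms"/> in place. A first pass counts the transforms
/// that are not yet present, then a backwards pass writes the merged result from the end of the
/// buffer so no temporary array is needed; <paramref name="len1"/> is updated to the new length.
/// </summary>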
private static void MergeSortedTransforms(UMATransform[] mergedTransforms, ref int len1, UMATransform[] umaTransforms)
{
int newBones = 0;
int pos1 = 0;
int pos2 = 0;
int len2 = umaTransforms.Length;
while(pos1 < len1 && pos2 < len2 )
{
long i = ((long)mergedTransforms[pos1].hash) - ((long)umaTransforms[pos2].hash);
if (i == 0)
{
pos1++;
pos2++;
}
else if (i < 0)
{
pos1++;
}
else
{
pos2++;
newBones++;
}
}
newBones += len2 - pos2;
pos1 = len1 - 1;
pos2 = len2 - 1;
len1 += newBones;
int dest = len1-1;
while (pos1 >= 0 && pos2 >= 0)
{
long i = ((long)mergedTransforms[pos1].hash) - ((long)umaTransforms[pos2].hash);
if (i == 0)
{
mergedTransforms[dest] = mergedTransforms[pos1];
pos1--;
pos2--;
}
else if (i > 0)
{
mergedTransforms[dest] = mergedTransforms[pos1];
pos1--;
}
else
{
mergedTransforms[dest] = umaTransforms[pos2];
pos2--;
}
dest--;
}
while (pos2 >= 0)
{
mergedTransforms[dest] = umaTransforms[pos2];
pos2--;
dest--;
}
}
private static void AnalyzeSources(CombineInstance[] sources, int[] subMeshTriangleLength, ref int vertexCount, ref int bindPoseCount, ref int transformHierarchyCount, ref MeshComponents meshComponents)
{
for (int i = 0; i < subMeshTriangleLength.Length; i++)
{
subMeshTriangleLength[i] = 0;
}
foreach (var source in sources)
{
vertexCount += source.meshData.vertices.Length;
bindPoseCount += source.meshData.bindPoses.Length;
transformHierarchyCount += source.meshData.umaBones.Length;
if (source.meshData.normals != null && source.meshData.normals.Length != 0) meshComponents |= MeshComponents.has_normals;
if (source.meshData.tangents != null && source.meshData.tangents.Length != 0) meshComponents |= MeshComponents.has_tangents;
if (source.meshData.uv != null && source.meshData.uv.Length != 0) meshComponents |= MeshComponents.has_uv;
if (source.meshData.uv2 != null && source.meshData.uv2.Length != 0) meshComponents |= MeshComponents.has_uv2;
#if !UNITY_4_6
if (source.meshData.uv3 != null && source.meshData.uv3.Length != 0) meshComponents |= MeshComponents.has_uv3;
if (source.meshData.uv4 != null && source.meshData.uv4.Length != 0) meshComponents |= MeshComponents.has_uv4;
#endif
if (source.meshData.colors32 != null && source.meshData.colors32.Length != 0) meshComponents |= MeshComponents.has_colors32;
for (int i = 0; i < source.meshData.subMeshCount; i++)
{
if (source.targetSubmeshIndices[i] >= 0)
{
int triangleLength = source.meshData.submeshes[i].triangles.Length;
subMeshTriangleLength[source.targetSubmeshIndices[i]] += triangleLength;
}
}
}
}
private static int FindTargetSubMeshCount(CombineInstance[] sources)
{
int highestTargetIndex = -1;
foreach (var source in sources)
{
foreach (var targetIndex in source.targetSubmeshIndices)
{
if (highestTargetIndex < targetIndex)
{
highestTargetIndex = targetIndex;
}
}
}
return highestTargetIndex + 1;
}
private static void BuildBoneWeights(UMABoneWeight[] source, int sourceIndex, BoneWeight[] dest, int destIndex, int count, int[] bones, Matrix4x4[] bindPoses, Dictionary<int, BoneIndexEntry> bonesCollection, List<Matrix4x4> bindPosesList, List<int> bonesList)
{
int[] boneMapping = new int[bones.Length];
for (int i = 0; i < boneMapping.Length; i++)
{
boneMapping[i] = TranslateBoneIndex(i, bones, bindPoses, bonesCollection, bindPosesList, bonesList);
}
while (count-- > 0)
{
TranslateBoneWeight(ref source[sourceIndex++], ref dest[destIndex++], boneMapping);
}
}
private static void TranslateBoneWeight(ref UMABoneWeight source, ref BoneWeight dest, int[] boneMapping)
{
dest.weight0 = source.weight0;
dest.weight1 = source.weight1;
dest.weight2 = source.weight2;
dest.weight3 = source.weight3;
dest.boneIndex0 = boneMapping[source.boneIndex0];
dest.boneIndex1 = boneMapping[source.boneIndex1];
dest.boneIndex2 = boneMapping[source.boneIndex2];
dest.boneIndex3 = boneMapping[source.boneIndex3];
}
private struct BoneIndexEntry
{
public int index;
public List<int> indices;
public int Count { get { return index >= 0 ? 1 : indices.Count; } }
public int this[int idx]
{
get
{
if (index >= 0)
{
if (idx == 0) return index;
throw new ArgumentOutOfRangeException();
}
return indices[idx];
}
}
internal void AddIndex(int idx)
{
if (index >= 0)
{
indices = new List<int>(10);
indices.Add(index);
index = -1;
}
indices.Add(idx);
}
}
private static bool CompareSkinningMatrices(Matrix4x4 m1, ref Matrix4x4 m2)
{
if (Mathf.Abs(m1.m00 - m2.m00) > 0.0001) return false;
if (Mathf.Abs(m1.m01 - m2.m01) > 0.0001) return false;
if (Mathf.Abs(m1.m02 - m2.m02) > 0.0001) return false;
if (Mathf.Abs(m1.m03 - m2.m03) > 0.0001) return false;
if (Mathf.Abs(m1.m10 - m2.m10) > 0.0001) return false;
if (Mathf.Abs(m1.m11 - m2.m11) > 0.0001) return false;
if (Mathf.Abs(m1.m12 - m2.m12) > 0.0001) return false;
if (Mathf.Abs(m1.m13 - m2.m13) > 0.0001) return false;
if (Mathf.Abs(m1.m20 - m2.m20) > 0.0001) return false;
if (Mathf.Abs(m1.m21 - m2.m21) > 0.0001) return false;
if (Mathf.Abs(m1.m22 - m2.m22) > 0.0001) return false;
if (Mathf.Abs(m1.m23 - m2.m23) > 0.0001) return false;
// These never change in a TRS Matrix4x4
// if (Mathf.Abs(m1.m30 - m2.m30) > 0.0001) return false;
// if (Mathf.Abs(m1.m31 - m2.m31) > 0.0001) return false;
// if (Mathf.Abs(m1.m32 - m2.m32) > 0.0001) return false;
// if (Mathf.Abs(m1.m33 - m2.m33) > 0.0001) return false;
return true;
}
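/// <summary>
/// Maps a source-mesh bone index to its index in the combined bone list. An existing bone is
/// reused only when both its name hash and its bind pose match; otherwise a new bind pose and
/// bone entry are appended and the new index is returned.
/// </summary>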
private static int TranslateBoneIndex(int index, int[] bonesHashes, Matrix4x4[] bindPoses, Dictionary<int, BoneIndexEntry> bonesCollection, List<Matrix4x4> bindPosesList, List<int> bonesList)
{
var boneTransform = bonesHashes[index];
BoneIndexEntry entry;
if (bonesCollection.TryGetValue(boneTransform, out entry))
{
for (int i = 0; i < entry.Count; i++)
{
var res = entry[i];
if (CompareSkinningMatrices(bindPosesList[res], ref bindPoses[index]))
{
return res;
}
}
var idx = bindPosesList.Count;
entry.AddIndex(idx);
bindPosesList.Add(bindPoses[index]);
bonesList.Add(boneTransform);
return idx;
}
else
{
var idx = bindPosesList.Count;
bonesCollection.Add(boneTransform, new BoneIndexEntry() { index = idx });
bindPosesList.Add(bindPoses[index]);
bonesList.Add(boneTransform);
return idx;
}
}
private static void CopyColorsToColors32(Color[] source, int sourceIndex, Color32[] dest, int destIndex, int count)
{
while (count-- > 0)
{
var sColor = source[sourceIndex++];
dest[destIndex++] = new Color32((byte)Mathf.RoundToInt(sColor.r * 255f), (byte)Mathf.RoundToInt(sColor.g * 255f), (byte)Mathf.RoundToInt(sColor.b * 255f), (byte)Mathf.RoundToInt(sColor.a * 255f));
}
}
private static void FillArray(Vector4[] array, int index, int count, Vector4 value)
{
while (count-- > 0)
{
array[index++] = value;
}
}
private static void FillArray(Vector3[] array, int index, int count, Vector3 value)
{
while (count-- > 0)
{
array[index++] = value;
}
}
private static void FillArray(Vector2[] array, int index, int count, Vector2 value)
{
while (count-- > 0)
{
array[index++] = value;
}
}
private static void FillArray(Color[] array, int index, int count, Color value)
{
while (count-- > 0)
{
array[index++] = value;
}
}
private static void FillArray(Color32[] array, int index, int count, Color32 value)
{
while (count-- > 0)
{
array[index++] = value;
}
}
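// Copies 'count' ints from source to dest, adding a constant offset to each copied value.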
private static void CopyIntArrayAdd(int[] source, int sourceIndex, int[] dest, int destIndex, int count, int add)
{
for (int i = 0; i < count; i++)
{
dest[destIndex++] = source[sourceIndex++] + add;
}
}
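// Reuses the existing array when it is already large enough; otherwise allocates a new one (or returns null for a non-positive length).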
private static T[] EnsureArrayLength<T>(T[] oldArray, int newLength)
{
if (newLength <= 0)
return null;
if (oldArray != null && oldArray.Length >= newLength)
return oldArray;
return new T[newLength];
}
}
}
| |
//
// System.UriBuilder
//
// Author:
// Lawrence Pit ([email protected])
//
// Copyright (C) 2005, 2010 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections;
using System.Runtime.Serialization;
using System.Text;
// See RFC 2396 for more info on URI's.
namespace System
{
public class UriBuilder
{
private string scheme;
private string host;
private int port;
private string path;
private string query;
private string fragment;
private string username;
private string password;
private Uri uri;
private bool modified;
// Constructors
public UriBuilder()
{
Initialize(Uri.UriSchemeHttp, "localhost", -1, String.Empty, String.Empty);
}
public UriBuilder(string uri)
{
if (uri == null)
throw new ArgumentNullException("uriString");
Uri u = null;
if (Uri.TryCreate(uri, UriKind.Absolute, out u))
{
Initialize(u);
}
else if (!uri.Contains(Uri.SchemeDelimiter))
{
// second chance, UriBuilder parsing is more forgiving than Uri
Initialize(new Uri(Uri.UriSchemeHttp + Uri.SchemeDelimiter + uri));
}
else
throw new UriFormatException();
}
public UriBuilder(Uri uri)
{
#if NET_4_0
if (uri == null)
throw new ArgumentNullException ("uri");
#endif
Initialize(uri);
}
public UriBuilder(string schemeName, string hostName)
{
Initialize(schemeName, hostName, -1, String.Empty, String.Empty);
}
public UriBuilder(string scheme, string host, int portNumber)
{
Initialize(scheme, host, portNumber, String.Empty, String.Empty);
}
public UriBuilder(string scheme, string host, int port, string pathValue)
{
Initialize(scheme, host, port, pathValue, String.Empty);
}
public UriBuilder(string scheme, string host, int port, string path, string extraValue)
{
Initialize(scheme, host, port, path, extraValue);
}
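// Copies every component from an existing Uri and splits UserInfo into user name and password.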
private void Initialize(Uri uri)
{
Initialize(uri.Scheme, uri.Host, uri.Port, uri.AbsolutePath, String.Empty);
fragment = uri.Fragment;
query = uri.Query;
username = uri.UserInfo;
int pos = username.IndexOf(':');
if (pos != -1)
{
password = username.Substring(pos + 1);
username = username.Substring(0, pos);
}
else
{
password = String.Empty;
}
}
private void Initialize(string scheme, string host, int port, string pathValue, string extraValue)
{
modified = true;
Scheme = scheme;
Host = host;
Port = port;
Path = pathValue;
query = String.Empty;
fragment = String.Empty;
username = String.Empty;
password = String.Empty;
if (String.IsNullOrEmpty(extraValue))
return;
if (extraValue[0] == '#')
Fragment = extraValue.Remove(0, 1);
else if (extraValue[0] == '?')
Query = extraValue.Remove(0, 1);
else
throw new ArgumentException("extraValue");
}
// Properties
public string Fragment
{
get { return fragment; }
set
{
fragment = value;
if (fragment == null)
fragment = String.Empty;
else if (fragment.Length > 0)
fragment = "#" + value.Replace("%23", "#");
modified = true;
}
}
public string Host
{
get { return host; }
set
{
if (String.IsNullOrEmpty(value))
host = String.Empty;
else if ((value.IndexOf(':') != -1) && (value[0] != '['))
{
host = "[" + value + "]";
}
else
{
host = value;
}
modified = true;
}
}
public string Password
{
get { return password; }
set
{
password = (value == null) ? String.Empty : value;
}
}
public string Path
{
get { return path; }
set
{
if (value == null || value.Length == 0)
{
path = "/";
}
else
{
//TODO: path = Uri.EscapeString(value.Replace('\\', '/'), Uri.EscapeCommonHexBracketsQuery);
path = value.Replace('\\', '/');
}
modified = true;
}
}
public int Port
{
get { return port; }
set
{
if (value < -1)
throw new ArgumentOutOfRangeException("value");
// -1 is accepted here and means "use the default port for the scheme".
port = value;
modified = true;
}
}
public string Query
{
get { return query; }
set
{
// LAMESPEC: it doesn't say to always prepend a
// question mark to the value.. it does say this
// for fragment.
if (value == null || value.Length == 0)
query = String.Empty;
else
query = "?" + value;
modified = true;
}
}
public string Scheme
{
get { return scheme; }
set
{
if (value == null)
value = String.Empty;
int colonPos = value.IndexOf(':');
if (colonPos != -1)
value = value.Substring(0, colonPos);
scheme = value.ToLower();
modified = true;
}
}
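// Rebuilds and caches the Uri only when a component has changed since the last call.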
public Uri Uri
{
get
{
if (!modified)
return uri;
uri = new Uri(ToString());
// some properties are updated once the Uri is created - see unit tests
host = uri.Host;
path = uri.AbsolutePath;
modified = false;
return uri;
}
}
public string UserName
{
get { return username; }
set
{
username = (value == null) ? String.Empty : value;
modified = true;
}
}
// Methods
public override bool Equals(object rparam)
{
return (rparam == null) ? false : this.Uri.Equals(rparam.ToString());
}
public override int GetHashCode()
{
return this.Uri.GetHashCode();
}
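// Assembles the URI string: scheme and its delimiter, optional user info, host (with the port when one was set), then path, query and fragment.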
public override string ToString()
{
StringBuilder builder = new StringBuilder();
builder.Append(scheme);
// note: mailto and news use ':', not "://", as their delimiter
builder.Append(Uri.GetSchemeDelimiter(scheme));
if (username != String.Empty)
{
builder.Append(username);
if (password != String.Empty)
builder.Append(":" + password);
builder.Append('@');
}
if (host.Length > 0)
{
builder.Append(host);
if (port > 0)
builder.Append(":" + port);
}
if (path != String.Empty &&
builder[builder.Length - 1] != '/' &&
path.Length > 0 && path[0] != '/')
builder.Append('/');
builder.Append(path);
builder.Append(query);
builder.Append(fragment);
return builder.ToString();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Examples.Datagrid
{
using System;
using Apache.Ignite.Core;
using Apache.Ignite.Core.Binary;
using Apache.Ignite.Core.Cache;
using Apache.Ignite.Core.Cache.Configuration;
using Apache.Ignite.Core.Cache.Query;
/// <summary>
/// This example works with cache entirely in binary mode: no classes or configurations are needed.
/// <para />
/// 1) Set this class as startup object (Apache.Ignite.Examples project -> right-click -> Properties ->
/// Application -> Startup object);
/// 2) Start example (F5 or Ctrl+F5).
/// <para />
/// This example can be run with standalone Apache Ignite.NET node:
/// 1) Run %IGNITE_HOME%/platforms/dotnet/bin/Apache.Ignite.exe:
/// Apache.Ignite.exe -configFileName=platforms\dotnet\examples\apache.ignite.examples\app.config
/// 2) Start example.
/// </summary>
public class BinaryModeExample
{
/// <summary>Cache name.</summary>
private const string CacheName = "dotnet_binary_cache";
/// <summary>Person type name.</summary>
private const string PersonType = "Person";
/// <summary>Company type name.</summary>
private const string CompanyType = "Company";
/// <summary>Name field name.</summary>
private const string NameField = "Name";
/// <summary>Company ID field name.</summary>
private const string CompanyIdField = "CompanyId";
/// <summary>ID field name.</summary>
private const string IdField = "Id";
[STAThread]
public static void Main()
{
using (var ignite = Ignition.StartFromApplicationConfiguration())
{
Console.WriteLine();
Console.WriteLine(">>> Binary mode example started.");
// Create new cache and configure queries for Person and Company binary types.
// Note that there are no such classes defined.
var cache0 = ignite.GetOrCreateCache<object, object>(new CacheConfiguration
{
Name = CacheName,
QueryEntities = new[]
{
new QueryEntity
{
KeyType = typeof(int),
ValueTypeName = PersonType,
Fields = new[]
{
new QueryField(NameField, typeof(string)),
new QueryField(CompanyIdField, typeof(int)),
},
Indexes = new[]
{
new QueryIndex(false, QueryIndexType.FullText, NameField),
new QueryIndex(false, QueryIndexType.Sorted, CompanyIdField)
}
},
new QueryEntity
{
KeyType = typeof(int),
ValueTypeName = CompanyType,
Fields = new[]
{
new QueryField(IdField, typeof(int)),
new QueryField(NameField, typeof(string))
}
}
}
});
// Switch to binary mode to work with data in serialized form.
var cache = cache0.WithKeepBinary<int, IBinaryObject>();
// Clean up caches on all nodes before run.
cache.Clear();
// Populate cache with sample data entries.
PopulateCache(cache);
// Run read & modify example.
ReadModifyExample(cache);
// Run SQL query example.
SqlQueryExample(cache);
// Run SQL query with join example.
SqlJoinQueryExample(cache);
// Run full text query example.
FullTextQueryExample(cache);
Console.WriteLine();
}
Console.WriteLine();
Console.WriteLine(">>> Example finished, press any key to exit ...");
Console.ReadKey();
}
/// <summary>
/// Reads binary object fields and modifies them.
/// </summary>
/// <param name="cache">Cache.</param>
private static void ReadModifyExample(ICache<int, IBinaryObject> cache)
{
const int id = 1;
IBinaryObject person = cache[id];
string name = person.GetField<string>(NameField);
Console.WriteLine();
Console.WriteLine(">>> Name of the person with id {0}: {1}", id, name);
// Modify the binary object.
cache[id] = person.ToBuilder().SetField(NameField, name + " Jr.").Build();
Console.WriteLine(">>> Modified person with id {0}: {1}", id, cache[1]);
}
/// <summary>
/// Queries names for all persons.
/// </summary>
/// <param name="cache">Cache.</param>
private static void SqlQueryExample(ICache<int, IBinaryObject> cache)
{
var qry = cache.Query(new SqlFieldsQuery("select name from Person order by name"));
Console.WriteLine();
Console.WriteLine(">>> All person names:");
foreach (var row in qry)
Console.WriteLine(">>> " + row[0]);
}
/// <summary>
/// Queries persons that work for company with provided name.
/// </summary>
/// <param name="cache">Cache.</param>
private static void SqlJoinQueryExample(ICache<int, IBinaryObject> cache)
{
const string orgName = "Apache";
var qry = cache.Query(new SqlFieldsQuery(
"select pers.Name from Person as pers, Company as comp where pers.CompanyId = comp.Id and comp.Name = ?",
orgName)
{
EnableDistributedJoins = true,
Timeout = new TimeSpan(0, 1, 0)
});
Console.WriteLine();
Console.WriteLine(">>> Persons working for " + orgName + ":");
foreach (var entry in qry)
Console.WriteLine(">>> " + entry[0]);
}
/// <summary>
/// Queries persons that have a specific name using full-text query API.
/// </summary>
/// <param name="cache">Cache.</param>
private static void FullTextQueryExample(ICache<int, IBinaryObject> cache)
{
var qry = cache.Query(new TextQuery(PersonType, "Peters"));
Console.WriteLine();
Console.WriteLine(">>> Persons named Peters:");
foreach (var entry in qry)
Console.WriteLine(">>> " + entry.Value);
}
/// <summary>
/// Populate cache with data for this example.
/// </summary>
/// <param name="cache">Cache.</param>
private static void PopulateCache(ICache<int, IBinaryObject> cache)
{
IBinary binary = cache.Ignite.GetBinary();
// Populate persons.
cache[1] = binary.GetBuilder(PersonType)
.SetField(NameField, "James Wilson")
.SetField(CompanyIdField, -1)
.Build();
cache[2] = binary.GetBuilder(PersonType)
.SetField(NameField, "Daniel Adams")
.SetField(CompanyIdField, -1)
.Build();
cache[3] = binary.GetBuilder(PersonType)
.SetField(NameField, "Cristian Moss")
.SetField(CompanyIdField, -1)
.Build();
cache[4] = binary.GetBuilder(PersonType)
.SetField(NameField, "Allison Mathis")
.SetField(CompanyIdField, -2)
.Build();
cache[5] = binary.GetBuilder(PersonType)
.SetField(NameField, "Breana Robbin")
.SetField(CompanyIdField, -2)
.Build();
cache[6] = binary.GetBuilder(PersonType)
.SetField(NameField, "Philip Horsley")
.SetField(CompanyIdField, -2)
.Build();
cache[7] = binary.GetBuilder(PersonType)
.SetField(NameField, "James Peters")
.SetField(CompanyIdField, -2)
.Build();
// Populate companies.
cache[-1] = binary.GetBuilder(CompanyType)
.SetField(NameField, "Apache")
.SetField(IdField, -1)
.Build();
cache[-2] = binary.GetBuilder(CompanyType)
.SetField(NameField, "Microsoft")
.SetField(IdField, -2)
.Build();
}
}
}