// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // ------------------------------------------------------------------------------ // Changes to this file must follow the http://aka.ms/api-review process. // ------------------------------------------------------------------------------ using System.Collections.Generic; using System.Security.Authentication; using System.Security.Cryptography.X509Certificates; using System.Threading; using System.Threading.Tasks; namespace System.Net.Security { public abstract class AuthenticatedStream : System.IO.Stream { protected AuthenticatedStream(System.IO.Stream innerStream, bool leaveInnerStreamOpen) { } public abstract bool IsAuthenticated { get; } public abstract bool IsEncrypted { get; } public abstract bool IsMutuallyAuthenticated { get; } public abstract bool IsServer { get; } public abstract bool IsSigned { get; } public bool LeaveInnerStreamOpen { get { throw null; } } protected System.IO.Stream InnerStream { get { throw null; } } protected override void Dispose(bool disposing) { } } public enum EncryptionPolicy { AllowNoEncryption = 1, NoEncryption = 2, RequireEncryption = 0, } public delegate System.Security.Cryptography.X509Certificates.X509Certificate LocalCertificateSelectionCallback(object sender, string targetHost, System.Security.Cryptography.X509Certificates.X509CertificateCollection localCertificates, System.Security.Cryptography.X509Certificates.X509Certificate remoteCertificate, string[] acceptableIssuers); public partial class NegotiateStream : AuthenticatedStream { public NegotiateStream(System.IO.Stream innerStream) : base(innerStream, false) { } public NegotiateStream(System.IO.Stream innerStream, bool leaveInnerStreamOpen) : base(innerStream, leaveInnerStreamOpen) { } public override bool CanRead { get { throw null; } } public override bool CanSeek { get { throw null; } } public override bool CanTimeout { get { throw null; } } public override bool CanWrite { get { throw null; } } public virtual System.Security.Principal.TokenImpersonationLevel ImpersonationLevel { get { throw null; } } public override bool IsAuthenticated { get { throw null; } } public override bool IsEncrypted { get { throw null; } } public override bool IsMutuallyAuthenticated { get { throw null; } } public override bool IsServer { get { throw null; } } public override bool IsSigned { get { throw null; } } public override long Length { get { throw null; } } public override long Position { get { throw null; } set { } } public override int ReadTimeout { get { throw null; } set { } } public virtual System.Security.Principal.IIdentity RemoteIdentity { get { throw null; } } public override int WriteTimeout { get { throw null; } set { } } public virtual void AuthenticateAsClient() { } public virtual void AuthenticateAsClient(System.Net.NetworkCredential credential, string targetName) { } public virtual void AuthenticateAsClient(System.Net.NetworkCredential credential, System.Security.Authentication.ExtendedProtection.ChannelBinding binding, string targetName) { } public virtual void AuthenticateAsClient(System.Net.NetworkCredential credential, string targetName, System.Net.Security.ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel allowedImpersonationLevel) { } public virtual void AuthenticateAsClient(System.Net.NetworkCredential credential, 
System.Security.Authentication.ExtendedProtection.ChannelBinding binding, string targetName, System.Net.Security.ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel allowedImpersonationLevel) { } public virtual void AuthenticateAsServer() { } public virtual void AuthenticateAsServer(System.Security.Authentication.ExtendedProtection.ExtendedProtectionPolicy policy) { } public virtual void AuthenticateAsServer(NetworkCredential credential, ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel requiredImpersonationLevel) { } public virtual void AuthenticateAsServer(NetworkCredential credential, System.Security.Authentication.ExtendedProtection.ExtendedProtectionPolicy policy, ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel requiredImpersonationLevel) { } public virtual System.Threading.Tasks.Task AuthenticateAsClientAsync() { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsClientAsync(NetworkCredential credential, string targetName) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsClientAsync(NetworkCredential credential, System.Security.Authentication.ExtendedProtection.ChannelBinding binding, string targetName) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsClientAsync(NetworkCredential credential, string targetName, ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel allowedImpersonationLevel) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsClientAsync(NetworkCredential credential, System.Security.Authentication.ExtendedProtection.ChannelBinding binding, string targetName, ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel allowedImpersonationLevel) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsServerAsync() { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsServerAsync(System.Security.Authentication.ExtendedProtection.ExtendedProtectionPolicy policy) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsServerAsync(NetworkCredential credential, ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel requiredImpersonationLevel) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsServerAsync(NetworkCredential credential, System.Security.Authentication.ExtendedProtection.ExtendedProtectionPolicy policy, ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel requiredImpersonationLevel) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsClient(System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsClient(System.Net.NetworkCredential credential, string targetName, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsClient(System.Net.NetworkCredential credential, System.Security.Authentication.ExtendedProtection.ChannelBinding binding, string targetName, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsClient(System.Net.NetworkCredential credential, string targetName, System.Net.Security.ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel allowedImpersonationLevel, 
System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsClient(System.Net.NetworkCredential credential, System.Security.Authentication.ExtendedProtection.ChannelBinding binding, string targetName, System.Net.Security.ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel allowedImpersonationLevel, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsServer(AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsServer(System.Security.Authentication.ExtendedProtection.ExtendedProtectionPolicy policy, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual IAsyncResult BeginAuthenticateAsServer(System.Net.NetworkCredential credential, System.Net.Security.ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel requiredImpersonationLevel, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual IAsyncResult BeginAuthenticateAsServer(System.Net.NetworkCredential credential, System.Security.Authentication.ExtendedProtection.ExtendedProtectionPolicy policy, System.Net.Security.ProtectionLevel requiredProtectionLevel, System.Security.Principal.TokenImpersonationLevel requiredImpersonationLevel, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback asyncCallback, object asyncState) { throw null; } public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback asyncCallback, object asyncState) { throw null; } protected override void Dispose(bool disposing) { } public virtual void EndAuthenticateAsClient(System.IAsyncResult asyncResult) { } public virtual void EndAuthenticateAsServer(System.IAsyncResult asyncResult) { } public override int EndRead(IAsyncResult asyncResult) { throw null; } public override void EndWrite(IAsyncResult asyncResult) { } public override void Flush() { } public override System.Threading.Tasks.Task FlushAsync(System.Threading.CancellationToken cancellationToken) { throw null; } public override int Read(byte[] buffer, int offset, int count) { throw null; } public override long Seek(long offset, System.IO.SeekOrigin origin) { throw null; } public override void SetLength(long value) { } public override void Write(byte[] buffer, int offset, int count) { } } public enum ProtectionLevel { None = 0, Sign = 1, EncryptAndSign = 2 } public delegate bool RemoteCertificateValidationCallback(object sender, System.Security.Cryptography.X509Certificates.X509Certificate certificate, System.Security.Cryptography.X509Certificates.X509Chain chain, System.Net.Security.SslPolicyErrors sslPolicyErrors); public class SslServerAuthenticationOptions { public bool AllowRenegotiation { get { throw null; } set { } } public X509Certificate ServerCertificate { get { throw null; } set { } } public bool ClientCertificateRequired { get { throw null; } set { } } public SslProtocols EnabledSslProtocols { get { throw null; } set { } } public X509RevocationMode CertificateRevocationCheckMode { get { throw null; } set { } } public List<SslApplicationProtocol> ApplicationProtocols { get { throw null; } set { } } public RemoteCertificateValidationCallback RemoteCertificateValidationCallback { get { throw null; } set { } } public EncryptionPolicy 
EncryptionPolicy { get { throw null; } set { } } } public partial class SslClientAuthenticationOptions { public bool AllowRenegotiation { get { throw null; } set { } } public string TargetHost { get { throw null; } set { } } public X509CertificateCollection ClientCertificates { get { throw null; } set { } } public LocalCertificateSelectionCallback LocalCertificateSelectionCallback { get { throw null; } set { } } public SslProtocols EnabledSslProtocols { get { throw null; } set { } } public X509RevocationMode CertificateRevocationCheckMode { get { throw null; } set { } } public List<SslApplicationProtocol> ApplicationProtocols { get { throw null; } set { } } public RemoteCertificateValidationCallback RemoteCertificateValidationCallback { get { throw null; } set { } } public EncryptionPolicy EncryptionPolicy { get { throw null; } set { } } } public readonly partial struct SslApplicationProtocol : IEquatable<SslApplicationProtocol> { public static readonly SslApplicationProtocol Http2; public static readonly SslApplicationProtocol Http11; public SslApplicationProtocol(byte[] protocol) { } public SslApplicationProtocol(string protocol) { } public ReadOnlyMemory<byte> Protocol { get { throw null; } } public bool Equals(SslApplicationProtocol other) { throw null; } public override bool Equals(object obj) { throw null; } public override int GetHashCode() { throw null; } public override string ToString() { throw null; } public static bool operator ==(SslApplicationProtocol left, SslApplicationProtocol right) { throw null; } public static bool operator !=(SslApplicationProtocol left, SslApplicationProtocol right) { throw null; } } public partial class SslStream : AuthenticatedStream { public SslStream(System.IO.Stream innerStream) : base(innerStream, false) { } public SslStream(System.IO.Stream innerStream, bool leaveInnerStreamOpen) : base(innerStream, leaveInnerStreamOpen) { } public SslStream(System.IO.Stream innerStream, bool leaveInnerStreamOpen, System.Net.Security.RemoteCertificateValidationCallback userCertificateValidationCallback) : base(innerStream, leaveInnerStreamOpen) { } public SslStream(System.IO.Stream innerStream, bool leaveInnerStreamOpen, System.Net.Security.RemoteCertificateValidationCallback userCertificateValidationCallback, System.Net.Security.LocalCertificateSelectionCallback userCertificateSelectionCallback) : base(innerStream, leaveInnerStreamOpen) { } public SslStream(System.IO.Stream innerStream, bool leaveInnerStreamOpen, System.Net.Security.RemoteCertificateValidationCallback userCertificateValidationCallback, System.Net.Security.LocalCertificateSelectionCallback userCertificateSelectionCallback, System.Net.Security.EncryptionPolicy encryptionPolicy) : base(innerStream, leaveInnerStreamOpen) { } public SslApplicationProtocol NegotiatedApplicationProtocol { get { throw null; } } public override bool CanRead { get { throw null; } } public override bool CanSeek { get { throw null; } } public override bool CanTimeout { get { throw null; } } public override bool CanWrite { get { throw null; } } public virtual bool CheckCertRevocationStatus { get { throw null; } } public virtual System.Security.Authentication.CipherAlgorithmType CipherAlgorithm { get { throw null; } } public virtual int CipherStrength { get { throw null; } } public virtual System.Security.Authentication.HashAlgorithmType HashAlgorithm { get { throw null; } } public virtual int HashStrength { get { throw null; } } public override bool IsAuthenticated { get { throw null; } } public override bool IsEncrypted { 
get { throw null; } } public override bool IsMutuallyAuthenticated { get { throw null; } } public override bool IsServer { get { throw null; } } public override bool IsSigned { get { throw null; } } public virtual System.Security.Authentication.ExchangeAlgorithmType KeyExchangeAlgorithm { get { throw null; } } public virtual int KeyExchangeStrength { get { throw null; } } public override long Length { get { throw null; } } public virtual System.Security.Cryptography.X509Certificates.X509Certificate LocalCertificate { get { throw null; } } public override long Position { get { throw null; } set { } } public override int ReadTimeout { get { throw null; } set { } } public virtual System.Security.Cryptography.X509Certificates.X509Certificate RemoteCertificate { get { throw null; } } public virtual System.Security.Authentication.SslProtocols SslProtocol { get { throw null; } } public System.Net.TransportContext TransportContext { get { throw null; } } public override int WriteTimeout { get { throw null; } set { } } public virtual void AuthenticateAsClient(string targetHost) { } public virtual void AuthenticateAsClient(string targetHost, System.Security.Cryptography.X509Certificates.X509CertificateCollection clientCertificates, System.Security.Authentication.SslProtocols enabledSslProtocols, bool checkCertificateRevocation) { } public virtual void AuthenticateAsClient(string targetHost, System.Security.Cryptography.X509Certificates.X509CertificateCollection clientCertificates, bool checkCertificateRevocation) { } public virtual void AuthenticateAsServer(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate) { } public virtual void AuthenticateAsServer(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate, bool clientCertificateRequired, System.Security.Authentication.SslProtocols enabledSslProtocols, bool checkCertificateRevocation) { } public virtual void AuthenticateAsServer(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate, bool clientCertificateRequired, bool checkCertificateRevocation) { } public virtual System.Threading.Tasks.Task AuthenticateAsClientAsync(string targetHost) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsClientAsync(string targetHost, System.Security.Cryptography.X509Certificates.X509CertificateCollection clientCertificates, System.Security.Authentication.SslProtocols enabledSslProtocols, bool checkCertificateRevocation) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsClientAsync(string targetHost, System.Security.Cryptography.X509Certificates.X509CertificateCollection clientCertificates, bool checkCertificateRevocation) { throw null; } public Task AuthenticateAsClientAsync(SslClientAuthenticationOptions sslClientAuthenticationOptions, CancellationToken cancellationToken) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsServerAsync(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsServerAsync(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate, bool clientCertificateRequired, System.Security.Authentication.SslProtocols enabledSslProtocols, bool checkCertificateRevocation) { throw null; } public virtual System.Threading.Tasks.Task AuthenticateAsServerAsync(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate, bool clientCertificateRequired, bool 
checkCertificateRevocation) { throw null; } public Task AuthenticateAsServerAsync(SslServerAuthenticationOptions sslClientAuthenticationOptions, CancellationToken cancellationToken) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsClient(string targetHost, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsClient(string targetHost, System.Security.Cryptography.X509Certificates.X509CertificateCollection clientCertificates, System.Security.Authentication.SslProtocols enabledSslProtocols, bool checkCertificateRevocation, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsClient(string targetHost, System.Security.Cryptography.X509Certificates.X509CertificateCollection clientCertificates, bool checkCertificateRevocation, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsServer(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsServer(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate, bool clientCertificateRequired, System.Security.Authentication.SslProtocols enabledSslProtocols, bool checkCertificateRevocation, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public virtual System.IAsyncResult BeginAuthenticateAsServer(System.Security.Cryptography.X509Certificates.X509Certificate serverCertificate, bool clientCertificateRequired, bool checkCertificateRevocation, System.AsyncCallback asyncCallback, object asyncState) { throw null; } public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback asyncCallback, object asyncState) { throw null; } public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback asyncCallback, object asyncState) { throw null; } protected override void Dispose(bool disposing) { } public virtual void EndAuthenticateAsClient(IAsyncResult asyncResult) { } public virtual void EndAuthenticateAsServer(IAsyncResult asyncResult) { } public override int EndRead(IAsyncResult asyncResult) { throw null; } public override void EndWrite(IAsyncResult asyncResult) { } public override void Flush() { } public override System.Threading.Tasks.Task FlushAsync(System.Threading.CancellationToken cancellationToken) { throw null; } public override int Read(byte[] buffer, int offset, int count) { throw null; } public override long Seek(long offset, System.IO.SeekOrigin origin) { throw null; } public override void SetLength(long value) { } public virtual System.Threading.Tasks.Task ShutdownAsync() { throw null; } public void Write(byte[] buffer) { } public override void Write(byte[] buffer, int offset, int count) { } } } namespace System.Security.Authentication { public partial class AuthenticationException : System.SystemException { public AuthenticationException() { } public AuthenticationException(string message) { } public AuthenticationException(string message, System.Exception innerException) { } protected AuthenticationException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { } } public partial class InvalidCredentialException : System.Security.Authentication.AuthenticationException { public 
InvalidCredentialException() { } public InvalidCredentialException(string message) { } public InvalidCredentialException(string message, System.Exception innerException) { } protected InvalidCredentialException(System.Runtime.Serialization.SerializationInfo serializationInfo, System.Runtime.Serialization.StreamingContext streamingContext) { } } } namespace System.Security.Authentication.ExtendedProtection { public partial class ExtendedProtectionPolicy : System.Runtime.Serialization.ISerializable { public ExtendedProtectionPolicy(System.Security.Authentication.ExtendedProtection.PolicyEnforcement policyEnforcement) { } public ExtendedProtectionPolicy(System.Security.Authentication.ExtendedProtection.PolicyEnforcement policyEnforcement, System.Security.Authentication.ExtendedProtection.ChannelBinding customChannelBinding) { } public ExtendedProtectionPolicy(System.Security.Authentication.ExtendedProtection.PolicyEnforcement policyEnforcement, System.Security.Authentication.ExtendedProtection.ProtectionScenario protectionScenario, System.Collections.ICollection customServiceNames) { } public ExtendedProtectionPolicy(System.Security.Authentication.ExtendedProtection.PolicyEnforcement policyEnforcement, System.Security.Authentication.ExtendedProtection.ProtectionScenario protectionScenario, System.Security.Authentication.ExtendedProtection.ServiceNameCollection customServiceNames) { } protected ExtendedProtectionPolicy(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { } public System.Security.Authentication.ExtendedProtection.ChannelBinding CustomChannelBinding { get { throw null; } } public System.Security.Authentication.ExtendedProtection.ServiceNameCollection CustomServiceNames { get { throw null; } } public static bool OSSupportsExtendedProtection { get { throw null; } } public System.Security.Authentication.ExtendedProtection.PolicyEnforcement PolicyEnforcement { get { throw null; } } public System.Security.Authentication.ExtendedProtection.ProtectionScenario ProtectionScenario { get { throw null; } } void System.Runtime.Serialization.ISerializable.GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { } public override string ToString() { throw null; } } public enum PolicyEnforcement { Always = 2, Never = 0, WhenSupported = 1, } public enum ProtectionScenario { TransportSelected = 0, TrustedProxy = 1, } public partial class ServiceNameCollection : System.Collections.ReadOnlyCollectionBase { public ServiceNameCollection(System.Collections.ICollection items) { } public bool Contains(string searchServiceName) { throw null; } public System.Security.Authentication.ExtendedProtection.ServiceNameCollection Merge(System.Collections.IEnumerable serviceNames) { throw null; } public System.Security.Authentication.ExtendedProtection.ServiceNameCollection Merge(string serviceName) { throw null; } } }
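// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the reference file above): a minimal
// client-side TLS handshake over a TcpClient, written against the SslStream,
// SslClientAuthenticationOptions and SslApplicationProtocol surface declared
// above. The host name, port and validation policy are assumptions made for
// this example only.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Net.Security;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;

internal static class SslClientSketch
{
    private static async Task Main()
    {
        using TcpClient tcp = new TcpClient();
        await tcp.ConnectAsync("example.com", 443);

        // leaveInnerStreamOpen: false, so disposing the SslStream also disposes the inner stream.
        using SslStream ssl = new SslStream(tcp.GetStream(), leaveInnerStreamOpen: false);

        SslClientAuthenticationOptions options = new SslClientAuthenticationOptions
        {
            TargetHost = "example.com",
            // ALPN preference list: HTTP/2 first, then HTTP/1.1.
            ApplicationProtocols = new List<SslApplicationProtocol>
            {
                SslApplicationProtocol.Http2,
                SslApplicationProtocol.Http11,
            },
            // Only accept certificates that pass default chain validation.
            RemoteCertificateValidationCallback = (sender, certificate, chain, errors) =>
                errors == SslPolicyErrors.None,
        };

        await ssl.AuthenticateAsClientAsync(options, CancellationToken.None);

        Console.WriteLine($"Authenticated={ssl.IsAuthenticated}, Encrypted={ssl.IsEncrypted}");
        Console.WriteLine($"ALPN protocol: {ssl.NegotiatedApplicationProtocol}");
        Console.WriteLine($"TLS version: {ssl.SslProtocol}");
    }
}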
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gax = Google.Api.Gax; using gaxgrpc = Google.Api.Gax.Grpc; using lro = Google.LongRunning; using proto = Google.Protobuf; using grpccore = Grpc.Core; using grpcinter = Grpc.Core.Interceptors; using sys = System; using scg = System.Collections.Generic; using sco = System.Collections.ObjectModel; using st = System.Threading; using stt = System.Threading.Tasks; namespace Google.Cloud.Compute.V1 { /// <summary>Settings for <see cref="RegionNetworkEndpointGroupsClient"/> instances.</summary> public sealed partial class RegionNetworkEndpointGroupsSettings : gaxgrpc::ServiceSettingsBase { /// <summary>Get a new instance of the default <see cref="RegionNetworkEndpointGroupsSettings"/>.</summary> /// <returns>A new instance of the default <see cref="RegionNetworkEndpointGroupsSettings"/>.</returns> public static RegionNetworkEndpointGroupsSettings GetDefault() => new RegionNetworkEndpointGroupsSettings(); /// <summary> /// Constructs a new <see cref="RegionNetworkEndpointGroupsSettings"/> object with default settings. /// </summary> public RegionNetworkEndpointGroupsSettings() { } private RegionNetworkEndpointGroupsSettings(RegionNetworkEndpointGroupsSettings existing) : base(existing) { gax::GaxPreconditions.CheckNotNull(existing, nameof(existing)); DeleteSettings = existing.DeleteSettings; DeleteOperationsSettings = existing.DeleteOperationsSettings.Clone(); GetSettings = existing.GetSettings; InsertSettings = existing.InsertSettings; InsertOperationsSettings = existing.InsertOperationsSettings.Clone(); ListSettings = existing.ListSettings; OnCopy(existing); } partial void OnCopy(RegionNetworkEndpointGroupsSettings existing); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>RegionNetworkEndpointGroupsClient.Delete</c> and <c>RegionNetworkEndpointGroupsClient.DeleteAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>This call will not be retried.</description></item> /// <item><description>No timeout is applied.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings DeleteSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None); /// <summary> /// Long Running Operation settings for calls to <c>RegionNetworkEndpointGroupsClient.Delete</c> and /// <c>RegionNetworkEndpointGroupsClient.DeleteAsync</c>. 
/// </summary> /// <remarks> /// Uses default <see cref="gax::PollSettings"/> of: /// <list type="bullet"> /// <item><description>Initial delay: 20 seconds.</description></item> /// <item><description>Delay multiplier: 1.5</description></item> /// <item><description>Maximum delay: 45 seconds.</description></item> /// <item><description>Total timeout: 24 hours.</description></item> /// </list> /// </remarks> public lro::OperationsSettings DeleteOperationsSettings { get; set; } = new lro::OperationsSettings { DefaultPollSettings = new gax::PollSettings(gax::Expiration.FromTimeout(sys::TimeSpan.FromHours(24)), sys::TimeSpan.FromSeconds(20), 1.5, sys::TimeSpan.FromSeconds(45)), }; /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>RegionNetworkEndpointGroupsClient.Get</c> and <c>RegionNetworkEndpointGroupsClient.GetAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>This call will not be retried.</description></item> /// <item><description>No timeout is applied.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings GetSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>RegionNetworkEndpointGroupsClient.Insert</c> and <c>RegionNetworkEndpointGroupsClient.InsertAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>This call will not be retried.</description></item> /// <item><description>No timeout is applied.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings InsertSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None); /// <summary> /// Long Running Operation settings for calls to <c>RegionNetworkEndpointGroupsClient.Insert</c> and /// <c>RegionNetworkEndpointGroupsClient.InsertAsync</c>. /// </summary> /// <remarks> /// Uses default <see cref="gax::PollSettings"/> of: /// <list type="bullet"> /// <item><description>Initial delay: 20 seconds.</description></item> /// <item><description>Delay multiplier: 1.5</description></item> /// <item><description>Maximum delay: 45 seconds.</description></item> /// <item><description>Total timeout: 24 hours.</description></item> /// </list> /// </remarks> public lro::OperationsSettings InsertOperationsSettings { get; set; } = new lro::OperationsSettings { DefaultPollSettings = new gax::PollSettings(gax::Expiration.FromTimeout(sys::TimeSpan.FromHours(24)), sys::TimeSpan.FromSeconds(20), 1.5, sys::TimeSpan.FromSeconds(45)), }; /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>RegionNetworkEndpointGroupsClient.List</c> and <c>RegionNetworkEndpointGroupsClient.ListAsync</c>. 
/// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>This call will not be retried.</description></item> /// <item><description>No timeout is applied.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings ListSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None); /// <summary>Creates a deep clone of this object, with all the same property values.</summary> /// <returns>A deep clone of this <see cref="RegionNetworkEndpointGroupsSettings"/> object.</returns> public RegionNetworkEndpointGroupsSettings Clone() => new RegionNetworkEndpointGroupsSettings(this); } /// <summary> /// Builder class for <see cref="RegionNetworkEndpointGroupsClient"/> to provide simple configuration of /// credentials, endpoint etc. /// </summary> public sealed partial class RegionNetworkEndpointGroupsClientBuilder : gaxgrpc::ClientBuilderBase<RegionNetworkEndpointGroupsClient> { /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary> public RegionNetworkEndpointGroupsSettings Settings { get; set; } /// <summary>Creates a new builder with default settings.</summary> public RegionNetworkEndpointGroupsClientBuilder() { UseJwtAccessWithScopes = RegionNetworkEndpointGroupsClient.UseJwtAccessWithScopes; } partial void InterceptBuild(ref RegionNetworkEndpointGroupsClient client); partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<RegionNetworkEndpointGroupsClient> task); /// <summary>Builds the resulting client.</summary> public override RegionNetworkEndpointGroupsClient Build() { RegionNetworkEndpointGroupsClient client = null; InterceptBuild(ref client); return client ?? BuildImpl(); } /// <summary>Builds the resulting client asynchronously.</summary> public override stt::Task<RegionNetworkEndpointGroupsClient> BuildAsync(st::CancellationToken cancellationToken = default) { stt::Task<RegionNetworkEndpointGroupsClient> task = null; InterceptBuildAsync(cancellationToken, ref task); return task ?? BuildAsyncImpl(cancellationToken); } private RegionNetworkEndpointGroupsClient BuildImpl() { Validate(); grpccore::CallInvoker callInvoker = CreateCallInvoker(); return RegionNetworkEndpointGroupsClient.Create(callInvoker, Settings); } private async stt::Task<RegionNetworkEndpointGroupsClient> BuildAsyncImpl(st::CancellationToken cancellationToken) { Validate(); grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false); return RegionNetworkEndpointGroupsClient.Create(callInvoker, Settings); } /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary> protected override string GetDefaultEndpoint() => RegionNetworkEndpointGroupsClient.DefaultEndpoint; /// <summary> /// Returns the default scopes for this builder type, used if no scopes are otherwise specified. 
/// </summary> protected override scg::IReadOnlyList<string> GetDefaultScopes() => RegionNetworkEndpointGroupsClient.DefaultScopes; /// <summary>Returns the channel pool to use when no other options are specified.</summary> protected override gaxgrpc::ChannelPool GetChannelPool() => RegionNetworkEndpointGroupsClient.ChannelPool; /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary> protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => ComputeRestAdapter.ComputeAdapter; } /// <summary>RegionNetworkEndpointGroups client wrapper, for convenient use.</summary> /// <remarks> /// The RegionNetworkEndpointGroups API. /// </remarks> public abstract partial class RegionNetworkEndpointGroupsClient { /// <summary> /// The default endpoint for the RegionNetworkEndpointGroups service, which is a host of /// "compute.googleapis.com" and a port of 443. /// </summary> public static string DefaultEndpoint { get; } = "compute.googleapis.com:443"; /// <summary>The default RegionNetworkEndpointGroups scopes.</summary> /// <remarks> /// The default RegionNetworkEndpointGroups scopes are: /// <list type="bullet"> /// <item><description>https://www.googleapis.com/auth/compute</description></item> /// <item><description>https://www.googleapis.com/auth/cloud-platform</description></item> /// </list> /// </remarks> public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] { "https://www.googleapis.com/auth/compute", "https://www.googleapis.com/auth/cloud-platform", }); internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes); internal static bool UseJwtAccessWithScopes { get { bool useJwtAccessWithScopes = true; MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes); return useJwtAccessWithScopes; } } static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes); /// <summary> /// Asynchronously creates a <see cref="RegionNetworkEndpointGroupsClient"/> using the default credentials, /// endpoint and settings. To specify custom credentials or other settings, use /// <see cref="RegionNetworkEndpointGroupsClientBuilder"/>. /// </summary> /// <param name="cancellationToken"> /// The <see cref="st::CancellationToken"/> to use while creating the client. /// </param> /// <returns>The task representing the created <see cref="RegionNetworkEndpointGroupsClient"/>.</returns> public static stt::Task<RegionNetworkEndpointGroupsClient> CreateAsync(st::CancellationToken cancellationToken = default) => new RegionNetworkEndpointGroupsClientBuilder().BuildAsync(cancellationToken); /// <summary> /// Synchronously creates a <see cref="RegionNetworkEndpointGroupsClient"/> using the default credentials, /// endpoint and settings. To specify custom credentials or other settings, use /// <see cref="RegionNetworkEndpointGroupsClientBuilder"/>. /// </summary> /// <returns>The created <see cref="RegionNetworkEndpointGroupsClient"/>.</returns> public static RegionNetworkEndpointGroupsClient Create() => new RegionNetworkEndpointGroupsClientBuilder().Build(); /// <summary> /// Creates a <see cref="RegionNetworkEndpointGroupsClient"/> which uses the specified call invoker for remote /// operations. /// </summary> /// <param name="callInvoker"> /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null. 
/// </param> /// <param name="settings">Optional <see cref="RegionNetworkEndpointGroupsSettings"/>.</param> /// <returns>The created <see cref="RegionNetworkEndpointGroupsClient"/>.</returns> internal static RegionNetworkEndpointGroupsClient Create(grpccore::CallInvoker callInvoker, RegionNetworkEndpointGroupsSettings settings = null) { gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker)); grpcinter::Interceptor interceptor = settings?.Interceptor; if (interceptor != null) { callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor); } RegionNetworkEndpointGroups.RegionNetworkEndpointGroupsClient grpcClient = new RegionNetworkEndpointGroups.RegionNetworkEndpointGroupsClient(callInvoker); return new RegionNetworkEndpointGroupsClientImpl(grpcClient, settings); } /// <summary> /// Shuts down any channels automatically created by <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not /// affected. /// </summary> /// <remarks> /// After calling this method, further calls to <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down /// by another call to this method. /// </remarks> /// <returns>A task representing the asynchronous shutdown operation.</returns> public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync(); /// <summary>The underlying gRPC RegionNetworkEndpointGroups client</summary> public virtual RegionNetworkEndpointGroups.RegionNetworkEndpointGroupsClient GrpcClient => throw new sys::NotImplementedException(); /// <summary> /// Deletes the specified network endpoint group. Note that the NEG cannot be deleted if it is configured as a backend of a backend service. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual lro::Operation<Operation, Operation> Delete(DeleteRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Deletes the specified network endpoint group. Note that the NEG cannot be deleted if it is configured as a backend of a backend service. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> DeleteAsync(DeleteRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Deletes the specified network endpoint group. Note that the NEG cannot be deleted if it is configured as a backend of a backend service. 
/// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> DeleteAsync(DeleteRegionNetworkEndpointGroupRequest request, st::CancellationToken cancellationToken) => DeleteAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary>The long-running operations client for <c>Delete</c>.</summary> public virtual lro::OperationsClient DeleteOperationsClient => throw new sys::NotImplementedException(); /// <summary> /// Poll an operation once, using an <c>operationName</c> from a previous invocation of <c>Delete</c>. /// </summary> /// <param name="operationName"> /// The name of a previously invoked operation. Must not be <c>null</c> or empty. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The result of polling the operation.</returns> public virtual lro::Operation<Operation, Operation> PollOnceDelete(string operationName, gaxgrpc::CallSettings callSettings = null) => lro::Operation<Operation, Operation>.PollOnceFromName(gax::GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)), DeleteOperationsClient, callSettings); /// <summary> /// Asynchronously poll an operation once, using an <c>operationName</c> from a previous invocation of <c>Delete</c> /// . /// </summary> /// <param name="operationName"> /// The name of a previously invoked operation. Must not be <c>null</c> or empty. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A task representing the result of polling the operation.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> PollOnceDeleteAsync(string operationName, gaxgrpc::CallSettings callSettings = null) => lro::Operation<Operation, Operation>.PollOnceFromNameAsync(gax::GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)), DeleteOperationsClient, callSettings); /// <summary> /// Deletes the specified network endpoint group. Note that the NEG cannot be deleted if it is configured as a backend of a backend service. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where the network endpoint group is located. It should comply with RFC1035. /// </param> /// <param name="networkEndpointGroup"> /// The name of the network endpoint group to delete. It should comply with RFC1035. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual lro::Operation<Operation, Operation> Delete(string project, string region, string networkEndpointGroup, gaxgrpc::CallSettings callSettings = null) => Delete(new DeleteRegionNetworkEndpointGroupRequest { NetworkEndpointGroup = gax::GaxPreconditions.CheckNotNullOrEmpty(networkEndpointGroup, nameof(networkEndpointGroup)), Project = gax::GaxPreconditions.CheckNotNullOrEmpty(project, nameof(project)), Region = gax::GaxPreconditions.CheckNotNullOrEmpty(region, nameof(region)), }, callSettings); /// <summary> /// Deletes the specified network endpoint group. Note that the NEG cannot be deleted if it is configured as a backend of a backend service. 
/// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where the network endpoint group is located. It should comply with RFC1035. /// </param> /// <param name="networkEndpointGroup"> /// The name of the network endpoint group to delete. It should comply with RFC1035. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> DeleteAsync(string project, string region, string networkEndpointGroup, gaxgrpc::CallSettings callSettings = null) => DeleteAsync(new DeleteRegionNetworkEndpointGroupRequest { NetworkEndpointGroup = gax::GaxPreconditions.CheckNotNullOrEmpty(networkEndpointGroup, nameof(networkEndpointGroup)), Project = gax::GaxPreconditions.CheckNotNullOrEmpty(project, nameof(project)), Region = gax::GaxPreconditions.CheckNotNullOrEmpty(region, nameof(region)), }, callSettings); /// <summary> /// Deletes the specified network endpoint group. Note that the NEG cannot be deleted if it is configured as a backend of a backend service. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where the network endpoint group is located. It should comply with RFC1035. /// </param> /// <param name="networkEndpointGroup"> /// The name of the network endpoint group to delete. It should comply with RFC1035. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> DeleteAsync(string project, string region, string networkEndpointGroup, st::CancellationToken cancellationToken) => DeleteAsync(project, region, networkEndpointGroup, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Returns the specified network endpoint group. Gets a list of available network endpoint groups by making a list() request. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual NetworkEndpointGroup Get(GetRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Returns the specified network endpoint group. Gets a list of available network endpoint groups by making a list() request. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<NetworkEndpointGroup> GetAsync(GetRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Returns the specified network endpoint group. Gets a list of available network endpoint groups by making a list() request. 
/// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<NetworkEndpointGroup> GetAsync(GetRegionNetworkEndpointGroupRequest request, st::CancellationToken cancellationToken) => GetAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Returns the specified network endpoint group. Gets a list of available network endpoint groups by making a list() request. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where the network endpoint group is located. It should comply with RFC1035. /// </param> /// <param name="networkEndpointGroup"> /// The name of the network endpoint group. It should comply with RFC1035. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual NetworkEndpointGroup Get(string project, string region, string networkEndpointGroup, gaxgrpc::CallSettings callSettings = null) => Get(new GetRegionNetworkEndpointGroupRequest { NetworkEndpointGroup = gax::GaxPreconditions.CheckNotNullOrEmpty(networkEndpointGroup, nameof(networkEndpointGroup)), Project = gax::GaxPreconditions.CheckNotNullOrEmpty(project, nameof(project)), Region = gax::GaxPreconditions.CheckNotNullOrEmpty(region, nameof(region)), }, callSettings); /// <summary> /// Returns the specified network endpoint group. Gets a list of available network endpoint groups by making a list() request. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where the network endpoint group is located. It should comply with RFC1035. /// </param> /// <param name="networkEndpointGroup"> /// The name of the network endpoint group. It should comply with RFC1035. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<NetworkEndpointGroup> GetAsync(string project, string region, string networkEndpointGroup, gaxgrpc::CallSettings callSettings = null) => GetAsync(new GetRegionNetworkEndpointGroupRequest { NetworkEndpointGroup = gax::GaxPreconditions.CheckNotNullOrEmpty(networkEndpointGroup, nameof(networkEndpointGroup)), Project = gax::GaxPreconditions.CheckNotNullOrEmpty(project, nameof(project)), Region = gax::GaxPreconditions.CheckNotNullOrEmpty(region, nameof(region)), }, callSettings); /// <summary> /// Returns the specified network endpoint group. Gets a list of available network endpoint groups by making a list() request. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where the network endpoint group is located. It should comply with RFC1035. /// </param> /// <param name="networkEndpointGroup"> /// The name of the network endpoint group. It should comply with RFC1035. 
/// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<NetworkEndpointGroup> GetAsync(string project, string region, string networkEndpointGroup, st::CancellationToken cancellationToken) => GetAsync(project, region, networkEndpointGroup, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Creates a network endpoint group in the specified project using the parameters that are included in the request. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual lro::Operation<Operation, Operation> Insert(InsertRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates a network endpoint group in the specified project using the parameters that are included in the request. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> InsertAsync(InsertRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates a network endpoint group in the specified project using the parameters that are included in the request. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> InsertAsync(InsertRegionNetworkEndpointGroupRequest request, st::CancellationToken cancellationToken) => InsertAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary>The long-running operations client for <c>Insert</c>.</summary> public virtual lro::OperationsClient InsertOperationsClient => throw new sys::NotImplementedException(); /// <summary> /// Poll an operation once, using an <c>operationName</c> from a previous invocation of <c>Insert</c>. /// </summary> /// <param name="operationName"> /// The name of a previously invoked operation. Must not be <c>null</c> or empty. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The result of polling the operation.</returns> public virtual lro::Operation<Operation, Operation> PollOnceInsert(string operationName, gaxgrpc::CallSettings callSettings = null) => lro::Operation<Operation, Operation>.PollOnceFromName(gax::GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)), InsertOperationsClient, callSettings); /// <summary> /// Asynchronously poll an operation once, using an <c>operationName</c> from a previous invocation of <c>Insert</c> /// . /// </summary> /// <param name="operationName"> /// The name of a previously invoked operation. Must not be <c>null</c> or empty. 
/// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A task representing the result of polling the operation.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> PollOnceInsertAsync(string operationName, gaxgrpc::CallSettings callSettings = null) => lro::Operation<Operation, Operation>.PollOnceFromNameAsync(gax::GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)), InsertOperationsClient, callSettings); /// <summary> /// Creates a network endpoint group in the specified project using the parameters that are included in the request. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where you want to create the network endpoint group. It should comply with RFC1035. /// </param> /// <param name="networkEndpointGroupResource"> /// The body resource for this request /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual lro::Operation<Operation, Operation> Insert(string project, string region, NetworkEndpointGroup networkEndpointGroupResource, gaxgrpc::CallSettings callSettings = null) => Insert(new InsertRegionNetworkEndpointGroupRequest { NetworkEndpointGroupResource = gax::GaxPreconditions.CheckNotNull(networkEndpointGroupResource, nameof(networkEndpointGroupResource)), Project = gax::GaxPreconditions.CheckNotNullOrEmpty(project, nameof(project)), Region = gax::GaxPreconditions.CheckNotNullOrEmpty(region, nameof(region)), }, callSettings); /// <summary> /// Creates a network endpoint group in the specified project using the parameters that are included in the request. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where you want to create the network endpoint group. It should comply with RFC1035. /// </param> /// <param name="networkEndpointGroupResource"> /// The body resource for this request /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> InsertAsync(string project, string region, NetworkEndpointGroup networkEndpointGroupResource, gaxgrpc::CallSettings callSettings = null) => InsertAsync(new InsertRegionNetworkEndpointGroupRequest { NetworkEndpointGroupResource = gax::GaxPreconditions.CheckNotNull(networkEndpointGroupResource, nameof(networkEndpointGroupResource)), Project = gax::GaxPreconditions.CheckNotNullOrEmpty(project, nameof(project)), Region = gax::GaxPreconditions.CheckNotNullOrEmpty(region, nameof(region)), }, callSettings); /// <summary> /// Creates a network endpoint group in the specified project using the parameters that are included in the request. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where you want to create the network endpoint group. It should comply with RFC1035. 
/// </param> /// <param name="networkEndpointGroupResource"> /// The body resource for this request /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<lro::Operation<Operation, Operation>> InsertAsync(string project, string region, NetworkEndpointGroup networkEndpointGroupResource, st::CancellationToken cancellationToken) => InsertAsync(project, region, networkEndpointGroupResource, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Retrieves the list of regional network endpoint groups available to the specified project in the given region. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A pageable sequence of <see cref="NetworkEndpointGroup"/> resources.</returns> public virtual gax::PagedEnumerable<NetworkEndpointGroupList, NetworkEndpointGroup> List(ListRegionNetworkEndpointGroupsRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Retrieves the list of regional network endpoint groups available to the specified project in the given region. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A pageable asynchronous sequence of <see cref="NetworkEndpointGroup"/> resources.</returns> public virtual gax::PagedAsyncEnumerable<NetworkEndpointGroupList, NetworkEndpointGroup> ListAsync(ListRegionNetworkEndpointGroupsRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Retrieves the list of regional network endpoint groups available to the specified project in the given region. /// </summary> /// <param name="project"> /// Project ID for this request. /// </param> /// <param name="region"> /// The name of the region where the network endpoint group is located. It should comply with RFC1035. /// </param> /// <param name="pageToken"> /// The token returned from the previous request. A value of <c>null</c> or an empty string retrieves the first /// page. /// </param> /// <param name="pageSize"> /// The size of page to request. The response will not be larger than this, but may be smaller. A value of /// <c>null</c> or <c>0</c> uses a server-defined page size. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A pageable sequence of <see cref="NetworkEndpointGroup"/> resources.</returns> public virtual gax::PagedEnumerable<NetworkEndpointGroupList, NetworkEndpointGroup> List(string project, string region, string pageToken = null, int? pageSize = null, gaxgrpc::CallSettings callSettings = null) => List(new ListRegionNetworkEndpointGroupsRequest { Project = gax::GaxPreconditions.CheckNotNullOrEmpty(project, nameof(project)), Region = gax::GaxPreconditions.CheckNotNullOrEmpty(region, nameof(region)), PageToken = pageToken ?? "", PageSize = pageSize ?? 0, }, callSettings); /// <summary> /// Retrieves the list of regional network endpoint groups available to the specified project in the given region. /// </summary> /// <param name="project"> /// Project ID for this request. 
/// </param> /// <param name="region"> /// The name of the region where the network endpoint group is located. It should comply with RFC1035. /// </param> /// <param name="pageToken"> /// The token returned from the previous request. A value of <c>null</c> or an empty string retrieves the first /// page. /// </param> /// <param name="pageSize"> /// The size of page to request. The response will not be larger than this, but may be smaller. A value of /// <c>null</c> or <c>0</c> uses a server-defined page size. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A pageable asynchronous sequence of <see cref="NetworkEndpointGroup"/> resources.</returns> public virtual gax::PagedAsyncEnumerable<NetworkEndpointGroupList, NetworkEndpointGroup> ListAsync(string project, string region, string pageToken = null, int? pageSize = null, gaxgrpc::CallSettings callSettings = null) => ListAsync(new ListRegionNetworkEndpointGroupsRequest { Project = gax::GaxPreconditions.CheckNotNullOrEmpty(project, nameof(project)), Region = gax::GaxPreconditions.CheckNotNullOrEmpty(region, nameof(region)), PageToken = pageToken ?? "", PageSize = pageSize ?? 0, }, callSettings); } /// <summary>RegionNetworkEndpointGroups client wrapper implementation, for convenient use.</summary> /// <remarks> /// The RegionNetworkEndpointGroups API. /// </remarks> public sealed partial class RegionNetworkEndpointGroupsClientImpl : RegionNetworkEndpointGroupsClient { private readonly gaxgrpc::ApiCall<DeleteRegionNetworkEndpointGroupRequest, Operation> _callDelete; private readonly gaxgrpc::ApiCall<GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup> _callGet; private readonly gaxgrpc::ApiCall<InsertRegionNetworkEndpointGroupRequest, Operation> _callInsert; private readonly gaxgrpc::ApiCall<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList> _callList; /// <summary> /// Constructs a client wrapper for the RegionNetworkEndpointGroups service, with the specified gRPC client and /// settings. /// </summary> /// <param name="grpcClient">The underlying gRPC client.</param> /// <param name="settings"> /// The base <see cref="RegionNetworkEndpointGroupsSettings"/> used within this client. /// </param> public RegionNetworkEndpointGroupsClientImpl(RegionNetworkEndpointGroups.RegionNetworkEndpointGroupsClient grpcClient, RegionNetworkEndpointGroupsSettings settings) { GrpcClient = grpcClient; RegionNetworkEndpointGroupsSettings effectiveSettings = settings ?? 
RegionNetworkEndpointGroupsSettings.GetDefault(); gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings); DeleteOperationsClient = new lro::OperationsClientImpl(grpcClient.CreateOperationsClientForRegionOperations(), effectiveSettings.DeleteOperationsSettings); InsertOperationsClient = new lro::OperationsClientImpl(grpcClient.CreateOperationsClientForRegionOperations(), effectiveSettings.InsertOperationsSettings); _callDelete = clientHelper.BuildApiCall<DeleteRegionNetworkEndpointGroupRequest, Operation>(grpcClient.DeleteAsync, grpcClient.Delete, effectiveSettings.DeleteSettings).WithGoogleRequestParam("project", request => request.Project).WithGoogleRequestParam("region", request => request.Region).WithGoogleRequestParam("network_endpoint_group", request => request.NetworkEndpointGroup); Modify_ApiCall(ref _callDelete); Modify_DeleteApiCall(ref _callDelete); _callGet = clientHelper.BuildApiCall<GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup>(grpcClient.GetAsync, grpcClient.Get, effectiveSettings.GetSettings).WithGoogleRequestParam("project", request => request.Project).WithGoogleRequestParam("region", request => request.Region).WithGoogleRequestParam("network_endpoint_group", request => request.NetworkEndpointGroup); Modify_ApiCall(ref _callGet); Modify_GetApiCall(ref _callGet); _callInsert = clientHelper.BuildApiCall<InsertRegionNetworkEndpointGroupRequest, Operation>(grpcClient.InsertAsync, grpcClient.Insert, effectiveSettings.InsertSettings).WithGoogleRequestParam("project", request => request.Project).WithGoogleRequestParam("region", request => request.Region); Modify_ApiCall(ref _callInsert); Modify_InsertApiCall(ref _callInsert); _callList = clientHelper.BuildApiCall<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList>(grpcClient.ListAsync, grpcClient.List, effectiveSettings.ListSettings).WithGoogleRequestParam("project", request => request.Project).WithGoogleRequestParam("region", request => request.Region); Modify_ApiCall(ref _callList); Modify_ListApiCall(ref _callList); OnConstruction(grpcClient, effectiveSettings, clientHelper); } partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>; partial void Modify_DeleteApiCall(ref gaxgrpc::ApiCall<DeleteRegionNetworkEndpointGroupRequest, Operation> call); partial void Modify_GetApiCall(ref gaxgrpc::ApiCall<GetRegionNetworkEndpointGroupRequest, NetworkEndpointGroup> call); partial void Modify_InsertApiCall(ref gaxgrpc::ApiCall<InsertRegionNetworkEndpointGroupRequest, Operation> call); partial void Modify_ListApiCall(ref gaxgrpc::ApiCall<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList> call); partial void OnConstruction(RegionNetworkEndpointGroups.RegionNetworkEndpointGroupsClient grpcClient, RegionNetworkEndpointGroupsSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper); /// <summary>The underlying gRPC RegionNetworkEndpointGroups client</summary> public override RegionNetworkEndpointGroups.RegionNetworkEndpointGroupsClient GrpcClient { get; } partial void Modify_DeleteRegionNetworkEndpointGroupRequest(ref DeleteRegionNetworkEndpointGroupRequest request, ref gaxgrpc::CallSettings settings); partial void Modify_GetRegionNetworkEndpointGroupRequest(ref GetRegionNetworkEndpointGroupRequest request, ref gaxgrpc::CallSettings settings); partial void Modify_InsertRegionNetworkEndpointGroupRequest(ref 
InsertRegionNetworkEndpointGroupRequest request, ref gaxgrpc::CallSettings settings); partial void Modify_ListRegionNetworkEndpointGroupsRequest(ref ListRegionNetworkEndpointGroupsRequest request, ref gaxgrpc::CallSettings settings); /// <summary>The long-running operations client for <c>Delete</c>.</summary> public override lro::OperationsClient DeleteOperationsClient { get; } /// <summary> /// Deletes the specified network endpoint group. Note that the NEG cannot be deleted if it is configured as a backend of a backend service. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override lro::Operation<Operation, Operation> Delete(DeleteRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_DeleteRegionNetworkEndpointGroupRequest(ref request, ref callSettings); Operation response = _callDelete.Sync(request, callSettings); GetRegionOperationRequest pollRequest = GetRegionOperationRequest.FromInitialResponse(response); request.PopulatePollRequestFields(pollRequest); return new lro::Operation<Operation, Operation>(response.ToLroResponse(pollRequest.ToLroOperationName()), DeleteOperationsClient); } /// <summary> /// Deletes the specified network endpoint group. Note that the NEG cannot be deleted if it is configured as a backend of a backend service. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override async stt::Task<lro::Operation<Operation, Operation>> DeleteAsync(DeleteRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_DeleteRegionNetworkEndpointGroupRequest(ref request, ref callSettings); Operation response = await _callDelete.Async(request, callSettings).ConfigureAwait(false); GetRegionOperationRequest pollRequest = GetRegionOperationRequest.FromInitialResponse(response); request.PopulatePollRequestFields(pollRequest); return new lro::Operation<Operation, Operation>(response.ToLroResponse(pollRequest.ToLroOperationName()), DeleteOperationsClient); } /// <summary> /// Returns the specified network endpoint group. Gets a list of available network endpoint groups by making a list() request. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override NetworkEndpointGroup Get(GetRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_GetRegionNetworkEndpointGroupRequest(ref request, ref callSettings); return _callGet.Sync(request, callSettings); } /// <summary> /// Returns the specified network endpoint group. Gets a list of available network endpoint groups by making a list() request. 
/// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<NetworkEndpointGroup> GetAsync(GetRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_GetRegionNetworkEndpointGroupRequest(ref request, ref callSettings); return _callGet.Async(request, callSettings); } /// <summary>The long-running operations client for <c>Insert</c>.</summary> public override lro::OperationsClient InsertOperationsClient { get; } /// <summary> /// Creates a network endpoint group in the specified project using the parameters that are included in the request. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override lro::Operation<Operation, Operation> Insert(InsertRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_InsertRegionNetworkEndpointGroupRequest(ref request, ref callSettings); Operation response = _callInsert.Sync(request, callSettings); GetRegionOperationRequest pollRequest = GetRegionOperationRequest.FromInitialResponse(response); request.PopulatePollRequestFields(pollRequest); return new lro::Operation<Operation, Operation>(response.ToLroResponse(pollRequest.ToLroOperationName()), InsertOperationsClient); } /// <summary> /// Creates a network endpoint group in the specified project using the parameters that are included in the request. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override async stt::Task<lro::Operation<Operation, Operation>> InsertAsync(InsertRegionNetworkEndpointGroupRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_InsertRegionNetworkEndpointGroupRequest(ref request, ref callSettings); Operation response = await _callInsert.Async(request, callSettings).ConfigureAwait(false); GetRegionOperationRequest pollRequest = GetRegionOperationRequest.FromInitialResponse(response); request.PopulatePollRequestFields(pollRequest); return new lro::Operation<Operation, Operation>(response.ToLroResponse(pollRequest.ToLroOperationName()), InsertOperationsClient); } /// <summary> /// Retrieves the list of regional network endpoint groups available to the specified project in the given region. 
/// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A pageable sequence of <see cref="NetworkEndpointGroup"/> resources.</returns> public override gax::PagedEnumerable<NetworkEndpointGroupList, NetworkEndpointGroup> List(ListRegionNetworkEndpointGroupsRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_ListRegionNetworkEndpointGroupsRequest(ref request, ref callSettings); return new gaxgrpc::GrpcPagedEnumerable<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList, NetworkEndpointGroup>(_callList, request, callSettings); } /// <summary> /// Retrieves the list of regional network endpoint groups available to the specified project in the given region. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A pageable asynchronous sequence of <see cref="NetworkEndpointGroup"/> resources.</returns> public override gax::PagedAsyncEnumerable<NetworkEndpointGroupList, NetworkEndpointGroup> ListAsync(ListRegionNetworkEndpointGroupsRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_ListRegionNetworkEndpointGroupsRequest(ref request, ref callSettings); return new gaxgrpc::GrpcPagedAsyncEnumerable<ListRegionNetworkEndpointGroupsRequest, NetworkEndpointGroupList, NetworkEndpointGroup>(_callList, request, callSettings); } } public partial class ListRegionNetworkEndpointGroupsRequest : gaxgrpc::IPageRequest { /// <inheritdoc/> public int PageSize { get => checked((int)MaxResults); set => MaxResults = checked((uint)value); } } public static partial class RegionNetworkEndpointGroups { public partial class RegionNetworkEndpointGroupsClient { /// <summary> /// Creates a new instance of <see cref="lro::Operations.OperationsClient"/> using the same call invoker as /// this client, delegating to RegionOperations. /// </summary> /// <returns>A new Operations client for the same target as this client.</returns> public virtual lro::Operations.OperationsClient CreateOperationsClientForRegionOperations() => RegionOperations.RegionOperationsClient.CreateOperationsClient(CallInvoker); } } }
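// --- Usage sketch (not part of the generated client above) ---
// A minimal example of how the RegionNetworkEndpointGroupsClient surface shown above might be
// used to insert a NEG and page through the regional NEG list. It assumes the standard
// GAX-generated Create() factory, the Google.Cloud.Compute.V1 namespace, and the usual
// lro::Operation.PollUntilCompleted() helper; "my-project", "us-central1" and "example-neg"
// are placeholder values, not anything defined in the code above.
using System;
using Google.Cloud.Compute.V1;

public static class RegionNetworkEndpointGroupsUsageSketch
{
    public static void Run()
    {
        RegionNetworkEndpointGroupsClient client = RegionNetworkEndpointGroupsClient.Create();

        // Start the insert; the call returns a long-running Operation wrapper.
        var negResource = new NetworkEndpointGroup { Name = "example-neg" };
        var operation = client.Insert("my-project", "us-central1", negResource);

        // Block until the regional operation completes. PollOnceInsert/PollOnceInsertAsync
        // (shown above) could be used instead for manual polling by operation name.
        operation = operation.PollUntilCompleted();

        // List the NEGs in the region; the PagedEnumerable flattens page boundaries.
        foreach (NetworkEndpointGroup neg in client.List("my-project", "us-central1"))
        {
            Console.WriteLine(neg.Name);
        }
    }
}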
using System; using System.Collections; using System.Collections.Generic; using System.Data; using System.Text; using Epi; using Epi.Data; using VariableCollection = Epi.Collections.NamedObjectCollection<Epi.IVariable>; namespace Epi.Core.AnalysisInterpreter { /// <summary> /// Class DataSourceInfo /// </summary> public class DataSourceInfo : IDisposable { #region Private Attributes private DataTable primaryTable = null; private ArrayList joinTableList = null; private VariableCollection standardVariables = null; // DataTable inner = null; private string selectCriteria = String.Empty; private SortCriteria sortCriteria = null; #endregion Private Attributes #region Public Interface #region Constructors /// <summary> /// Constructor for the class /// </summary> public DataSourceInfo() { sortCriteria = new SortCriteria(); } #endregion Constructors #region Public Properties /// <summary> /// Property for the current selection criteria /// </summary> public string SelectCriteria { get { return selectCriteria; } } /// <summary> /// Accessor for primary table /// </summary> public DataTable PrimaryTable { get { return primaryTable; } set { primaryTable = value; } } /// <summary> /// Read-only accessor for join table list /// </summary> public ArrayList JoinTableList { get { if (this.joinTableList == null) { joinTableList = new ArrayList(); } return joinTableList; } } /// <summary> /// Read-only accessor for bridge /// </summary> public IDbDriver Db { get { return ((ITable)PrimaryTable).Database; } } /// <summary> /// Sort criteria for the data source /// </summary> public SortCriteria SortCriteria { get { return this.sortCriteria; } } ///// <summary> ///// Sort part ///// </summary> ///// ////zack 11/26/2007 //public string SqlStatementPartSort //{ // get // { // return this.sortCriteria; // } // set // { // this.sortCriteria = value; // } //} /// <summary> /// where part /// </summary> /// <returns></returns> /// //zack add 11/19/2007 public string SqlStatementPartWhere { get { return this.selectCriteria; } set { this.selectCriteria = value; } } /// <summary> /// Order By part /// </summary> /// <returns></returns> public string GetSqlStatementPartSortBy() { string sortClause = sortCriteria.ToString(); if (!string.IsNullOrEmpty(sortClause)) { return (" order by " + sortClause); } else { return string.Empty; } } #endregion Public Properties #region Public Methods /// <summary> /// Releases all resources used by Analysis DataTable /// </summary> public void Dispose() // Implements IDisposable.Dispose { if (joinTableList != null) { joinTableList.Clear(); joinTableList = null; } if (standardVariables != null) { standardVariables.Clear(); standardVariables = null; } } /// <summary> /// Tests the (partial) where clause that the user has entered by trying to execute an SQL command /// </summary> /// <remarks> /// This was necessary because, when the user put in the name of a nonexistent variable, /// the errors returned by the underlying database were less than helpful. /// At this time it is called by the select command and prevents the user from generating a where clause that doesn't work.
/// Defect 187 /// </remarks> /// <param name="expression"></param> /// <returns>bool</returns> public bool ValidWhereClause(Rule_Context pContext, string expression) { try { string testExpression = expression; testExpression = testExpression.Replace(StringLiterals.EPI_REPRESENTATION_OF_TRUE, "1"); testExpression = testExpression.Replace(StringLiterals.EPI_REPRESENTATION_OF_FALSE, "0"); //DataRow[] rows = pContext.GetOutput().Select(testExpression); return true; } catch (Exception) { throw new GeneralException(string.Format(SharedStrings.INVALID_EXPRESSION, "\"" + expression.Trim() + "\"")); } } /// <summary> /// Gets a count of records in a column /// </summary> /// <param name="columnName">Name of the column</param> /// <returns>Number of records in a column</returns> public int GetRecordCount(Rule_Context pContext, string columnName) { #region Input Validation if (string.IsNullOrEmpty(columnName)) { throw new ArgumentNullException("columnName"); } #endregion Input Validation WordBuilder queryBuilder = new WordBuilder(); queryBuilder.Append("select count" + Util.InsertInParantheses(columnName)); queryBuilder.Append(GetSqlStatementPartFrom(pContext)); if (Db.ColumnExists(PrimaryTable.TableName, ColumnNames.REC_STATUS)) { string whereStatement = GetSqlStatementPartWhere(); queryBuilder.Append(whereStatement); } else if (!string.IsNullOrEmpty(selectCriteria)) { string whereClause = " where " + this.selectCriteria; queryBuilder.Append(whereClause); } Query query = Db.CreateQuery(queryBuilder.ToString()); //recast to int before return to remove cast run time error int result = Int32.Parse((Db.ExecuteScalar(query)).ToString()); return result; } /// <summary> /// Logically delete or undelete the records (change RecStatus) that match the criteria /// </summary> /// <remarks> /// Criteria may be '*', which is all records; observes current selection criteria /// </remarks> /// <param name="criteria"></param> /// <param name="delete">Delete or undelete the records that match the criteria</param> /// <returns>The number of rows affected</returns> public int LogicallyDeleteRecords(string criteria, bool delete) { string tableName = PrimaryTable.TableName; string where = GetSqlStatementPartWhere(); if (!string.IsNullOrEmpty(criteria)) { if (string.IsNullOrEmpty(where)) { where = " where "; } else if (criteria != "*") { where += " and "; } where += criteria; } string sql = "Update " + tableName + " set recstatus = " + ((delete) ? 
"0" : "1") + where; Epi.Data.Query qry = Db.CreateQuery(sql); return Db.ExecuteNonQuery(qry); } /// <summary> /// Physically (permanently) delete the records that match the criteria /// </summary> /// <remarks> /// Criteria may be '*', which is all records; observes current selection criteria /// </remarks> /// <param name="criteria"></param> /// <returns>The number of rows affected</returns> public int PhysicallyDeleteRecords(string criteria) { string tableName = PrimaryTable.TableName; string where = GetSqlStatementPartWhere(); if (!string.IsNullOrEmpty(criteria)) { if (string.IsNullOrEmpty(where)) { where = " where "; } else if (criteria != "*") { where += " and "; } where += criteria; } string sql = "Delete from " + tableName + where; Epi.Data.Query qry = Db.CreateQuery(sql); return Db.ExecuteNonQuery(qry); } /// <summary> /// Gets count of all the records in a table /// </summary> /// <returns>Number of records in a table</returns> public int GetRecordCount(Rule_Context pContext) { return GetRecordCount(pContext, StringLiterals.STAR); } /// <summary> /// Get Data of a single column and all strata /// </summary> /// <remarks> /// If the variable is a defined variable, use the expression /// </remarks> /// <param name="var"></param> /// <param name="strata"></param> /// <returns></returns> public DataTable GetData(Rule_Context pContext, IVariable var, string[] strata) { #region Preconditions if (strata == null) { return GetData(pContext, var); } #endregion StringBuilder sql = new StringBuilder("select "); if (var.VarType == VariableType.DataSource) { sql.Append("[").Append(var.Name).Append("]"); } else { sql.Append(var.Expression); } if (strata.GetLength(0) > 0) { int count = strata.GetLength(0); for (int i = 0; i < count; i++) { sql.Append(", [").Append(strata[i].ToString()).Append("]"); } } sql.Append(this.GetSqlStatementPartFrom(pContext)); sql.Append(this.GetSqlStatementPartWhere()); sql.Append(this.GetSqlStatementPartSortBy()); Query query = Db.CreateQuery(sql.ToString()); return Db.Select(query); } /// <summary> /// Get Data of a single column /// </summary> /// <remarks> /// If the variable is a defined variable, use the expression /// </remarks> /// <param name="var"></param> /// <returns></returns> public DataTable GetData(Rule_Context pContext, IVariable var) { StringBuilder sql = new StringBuilder("select "); if (var.VarType == VariableType.DataSource) { sql.Append("[").Append(var.Name).Append("]"); } else { sql.Append(var.Expression); } sql.Append(this.GetSqlStatementPartFrom(pContext)); sql.Append(this.GetSqlStatementPartWhere()); sql.Append(this.GetSqlStatementPartSortBy()); Query query = Db.CreateQuery(sql.ToString()); return Db.Select(query); } /// <summary> /// Fills the inner table by fetching data, adding standard and global variables. 
/// </summary> /// <returns>DataTable</returns> public DataTable GetData(Rule_Context pContext) { string queryString = "select * " + this.GetSqlStatementPartFrom(pContext) + this.GetSqlStatementPartWhere() + this.GetSqlStatementPartSortBy(); Query query = Db.CreateQuery(queryString); return Db.Select(query); } string ExcludeMissing(string currentWhereClause, string[] names) { StringBuilder sb = new StringBuilder(currentWhereClause); if (!Configuration.GetNewInstance().Settings.IncludeMissingValues) { foreach (string name in names) { // Use "where" for the first condition and "and" for the rest, then append the null check for every column. sb.Append(string.IsNullOrEmpty(sb.ToString()) ? " where [" : " and [").Append(name).Append("] is not null"); } } return sb.ToString(); } /// <summary> /// Returns a 2x2 table for the tables command /// </summary> /// <remarks>The cells are numbered as they are in Epi3</remarks> /// <param name="outcome"></param> /// <param name="exposure"></param> /// <returns>DataSet</returns> public DataSet GetDataSet2x2(Rule_Context pContext, string exposure, string outcome) { DataSet ds = null; try { StringBuilder sb = new StringBuilder("select "); sb.Append(exposure).Append(" AS [").Append(ColumnNames.EXPOSURE).Append("], ["); sb.Append(outcome).Append("] AS [").Append(ColumnNames.OUTCOME); sb.Append("], count([").Append(outcome).Append("]) AS [").Append(ColumnNames.COUNT); sb.Append("] "); sb.Append(GetSqlStatementPartFrom(pContext)); //if there is no project, there will not be a column Rec Status if (PrimaryTable.Columns.Contains(ColumnNames.REC_STATUS)) { sb.Append(ExcludeMissing(GetSqlStatementPartWhere(), new string[] { exposure, outcome })); } //a natively read MDB, and SELECT chosen else if (!string.IsNullOrEmpty(selectCriteria)) { string whereClause = " where " + this.selectCriteria; sb.Append(ExcludeMissing(whereClause, new string[] { exposure, outcome })); } sb.Append(" group by [").Append(exposure).Append("], [").Append(outcome); sb.Append("] order by [").Append(exposure); sb.Append("], [").Append(outcome).Append("];"); DataTable Table2x2 = DBReadExecute.GetDataTable(pContext.CurrentRead.File, sb.ToString()); //Query query = Db.CreateQuery(sb.ToString()); //DataTable Table2x2 = Db.Select(query); Table2x2.TableName = "Table2x2"; ds = new DataSet("dsTable2x2"); ds.Tables.Add(Table2x2); //DataTable distinctOutcomes = DistinctColumn(outcome); //distinctOutcomes.TableName = "DistinctOutcomes"; //ds.Tables.Add(distinctOutcomes); } catch (Exception ex) { throw new GeneralException(SharedStrings.UNABLE_CREATE_2X2, ex); } return ds; } /// <summary> /// Gets the frequencies of an outcome variable stratified by one or more strata /// </summary> /// <param name="var">The outcome variable (IVariable)</param> /// <param name="strata">An array of the names of strata variables</param> /// <param name="weightVar">Weighted variable.</param> /// <returns>The frequency table</returns> public DataSet GetFrequencies(Rule_Context pContext, IVariable var, string[] strata, string weightVar) { StringBuilder sb = new StringBuilder("select "); if (var.VarType != VariableType.DataSource) { sb.Append("(").Append(var.Expression).Append(") AS "); } sb.Append(var.Name).Append(", count(*) AS " + ColumnNames.FREQUENCY); if (!string.IsNullOrEmpty(weightVar)) //if weighted.
{ sb.Append(", sum(").Append(weightVar).Append(") as ").Append(ColumnNames.WEIGHT); } else { sb.Append(", 1 as ").Append(ColumnNames.WEIGHT); } if (strata != null) { foreach (string stratum in strata) { sb.Append(", ").Append(stratum); } } sb.Append(GetSqlStatementPartFrom(pContext)); string s = null; if (Db.ColumnExists(PrimaryTable.TableName, ColumnNames.REC_STATUS)) { s = GetSqlStatementPartWhere(); } else if (!string.IsNullOrEmpty(selectCriteria)) { s = " where " + this.selectCriteria; } if (!string.IsNullOrEmpty(s)) { sb.Append(s); } if (!Configuration.GetNewInstance().Settings.IncludeMissingValues && strata != null) { sb.Append((string.IsNullOrEmpty(s)) ? " where " : " and "); foreach (string stratum in strata) { sb.Append(stratum).Append(" is not null and "); } sb.Length -= 4; } sb.Append(" group by "); if (var.VarType == VariableType.DataSource) { sb.Append(var.Name); } else { sb.Append(var.Expression); } if (strata != null) { foreach (string stratum in strata) { sb.Append(", ").Append(stratum); } sb.Append(" order by "); foreach (string stratum in strata) { sb.Append(stratum).Append(","); } sb.Append(var.Name).Append(" desc;"); } string queryString = sb.ToString(); if (string.IsNullOrEmpty(queryString)) { return null; } else { DataSet ds = new DataSet("FreqDataSet"); if (strata != null) { ds.Tables.Add(DistinctStrata(pContext, strata)); } DataTable freq = Db.Select(Db.CreateQuery(queryString)); freq.TableName = "Frequencies"; ds.Tables.Add(freq); return ds; } } /// <summary> /// Gets the frequency table of a column /// </summary> /// <param name="variable"></param> /// <returns>The frequency table</returns> public DataTable GetFrequency(Rule_Context pContext, IVariable variable) { string columnName = variable.Name; string s = (string.IsNullOrEmpty(this.SqlStatementPartWhere)) ? " where " : " and "; string queryString = "select " + columnName + ", Count(*) AS " + ColumnNames.FREQUENCY; queryString += " From (select "; if (variable.VarType != VariableType.DataSource) { queryString += variable.Expression + " as "; } queryString += columnName + this.GetSqlStatementPartFrom(pContext) + this.GetSqlStatementPartWhere() + ") as TRecode Group by " + columnName; if (string.IsNullOrEmpty(queryString)) { return null; } else { Query query = Db.CreateQuery(queryString); return Db.Select(query); } } /// <summary> /// Gets a list of all the columns in a view /// </summary> /// <returns>DataTable containing a list of columns</returns> //public List<string> GetColumnNames() //{ // List<string> columnNames = new List<string>(); // columnNames.AddRange(PrimaryTable.TableColumnNames); // foreach (JoinTable joinTable in this.joinTableList) // { // columnNames.AddRange(joinTable.Table.TableColumnNames); // } // return columnNames; //} /// <summary> /// where part /// </summary> /// <returns></returns> //zack change this method to public, 11/21/07 public string GetSqlStatementPartWhere() { string whereClause = string.Empty; if (!string.IsNullOrEmpty(selectCriteria)) { whereClause = " where " + this.selectCriteria; } int scope = Configuration.GetNewInstance().Settings.RecordProcessingScope; if (scope != (int)(RecordProcessingScope.Both)) { whereClause += (string.IsNullOrEmpty(whereClause)) ? " where " : " and "; whereClause += "[" + ColumnNames.REC_STATUS + "] = "; whereClause += (scope == (int)(RecordProcessingScope.Deleted)) ? 
"0" : "1"; } return whereClause; } /// <summary> /// Call DistictinctStrata with only one column name /// </summary> /// <param name="colName"></param> /// <returns></returns> public DataTable DistinctColumn(Rule_Context pContext, string colName) { #region Preconditions if (string.IsNullOrEmpty(colName)) { return null; } #endregion preconditions return DistinctStrata(pContext, new string[1] { colName }); } /// <summary> /// returns a DataTable with the DISTINCT number of values per stratum /// </summary> /// <param name="strata"></param> /// <returns></returns> public DataTable DistinctStrata(Rule_Context pContext, string[] strata) { #region Preconditions if (strata == null) { return null; } #endregion preconditions StringBuilder strataQuery = new StringBuilder("select distinct "); foreach (string stratum in strata) { strataQuery.Append(stratum).Append(", "); } strataQuery.Length -= 2; strataQuery.Append(GetSqlStatementPartFrom(pContext)); //for distinguishing between native MDB vs projects if (PrimaryTable.Columns.Contains(ColumnNames.REC_STATUS)) { strataQuery.Append(ExcludeMissing(GetSqlStatementPartWhere(), strata)); } else if (!string.IsNullOrEmpty(selectCriteria)) { string whereClause = " where " + this.selectCriteria; strataQuery.Append(ExcludeMissing(whereClause, strata)); } strataQuery.Append(" order by "); foreach (string stratum in strata) { strataQuery.Append(stratum).Append(" desc, "); } strataQuery.Length -= 2; DataTable strataTable = new DataTable("Strata"); strataTable = Db.Select(Db.CreateQuery(strataQuery.ToString()), strataTable); return strataTable; } #endregion Public Methods #endregion Public Interface #region Private Methods /// <summary> /// from part /// </summary> /// <returns></returns> private string GetSqlStatementPartFrom(Rule_Context pContext) { string fromClause = " from " + pContext.CurrentRead.Identifier; foreach (JoinTable joinTable in JoinTableList) { fromClause += " inner join " + joinTable.Table.TableName; fromClause += " on " + joinTable.Condition; } return fromClause; } #endregion Private Methods } /// <summary> /// The JoinTable class /// </summary> public class JoinTable { #region Private Class Members private ITable table = null; private string condition = string.Empty; #endregion Private Class Members #region Constructors /// <summary> /// Default Constructor /// </summary> public JoinTable() { } /// <summary> /// Constructor for the class /// </summary> /// <param name="table"></param> /// <param name="condition">Condition for the join</param> public JoinTable(ITable table, string condition) { this.Table = table; this.Condition = condition; } #endregion Constructors #region Public Properties /// <summary> /// Accessor for table /// </summary> public ITable Table { get { return table; } set { table = value; } } /// <summary> /// Accessor for condition /// </summary> public string Condition { get { return this.condition; } set { this.condition = value; } } #endregion Public Properties } }
using System; using Journalist.EventStore.Events; namespace Journalist.EventStore.Streams { public class EventStreamConsumerStateMachine : IEventStreamConsumerStateMachine { private abstract class State { public static readonly State Initial = new InitialState(); public static readonly State Receiving = new ReceivingStartedState(); public static readonly State Received = new ReceivingCompletedState(); public static readonly State Consuming = new ConsumingState(); public static readonly State Consumed = new ConsumedState(); public static readonly State Closed = new ClosedState(); public virtual State MoveToReceivingStartedState(EventStreamConsumerStateMachine stm) { throw new NotImplementedException(); } public virtual State MoveToReceivingCompletedState(EventStreamConsumerStateMachine stm, int eventsCount) { throw new NotImplementedException(); } public virtual State MoveToConsumingStarted() { throw new NotImplementedException(); } public virtual State MoveToConsumedState(EventStreamConsumerStateMachine stm) { throw new NotImplementedException(); } public virtual State MoveToClosedState(EventStreamConsumerStateMachine stm) { throw new NotImplementedException(); } public virtual void EventProcessingStarted(EventStreamConsumerStateMachine stm) { throw new NotImplementedException(); } public virtual StreamVersion CalculateConsumedStreamVersion(EventStreamConsumerStateMachine stm, bool skipCurrentEvent) { throw new NotImplementedException(); } public virtual void ConsumedStreamVersionCommited(EventStreamConsumerStateMachine stm, StreamVersion version, bool skipCurrent) { stm.m_commitedVersion = version; stm.m_uncommittedEventCount = skipCurrent ? 0 : -1; } } private class InitialState : State { public override State MoveToReceivingStartedState(EventStreamConsumerStateMachine stm) { stm.m_uncommittedEventCount = -1; stm.m_receivedEventCount = 0; return Receiving; } public override State MoveToReceivingCompletedState(EventStreamConsumerStateMachine stm, int eventsCount) { throw new InvalidOperationException("Consumer stream is in receiving state."); } public override State MoveToConsumingStarted() { throw new InvalidOperationException("Consumer stream is empty."); } public override State MoveToConsumedState(EventStreamConsumerStateMachine stm) { throw new InvalidOperationException("Consumer stream is empty."); } public override State MoveToClosedState(EventStreamConsumerStateMachine stm) { stm.m_uncommittedEventCount = 0; return Closed; } } private class ReceivingStartedState : State { public override State MoveToReceivingStartedState(EventStreamConsumerStateMachine stm) { throw new InvalidOperationException("Consumer stream is in receiving started state."); } public override State MoveToReceivingCompletedState(EventStreamConsumerStateMachine stm, int eventsCount) { Require.ZeroOrGreater(eventsCount, "eventsCount"); stm.m_receivedEventCount = eventsCount; return Received; } public override State MoveToConsumingStarted() { throw new InvalidOperationException("Consumer stream is in receiving started state."); } public override State MoveToConsumedState(EventStreamConsumerStateMachine stm) { throw new InvalidOperationException("Consumer stream is in receiving started state."); } public override StreamVersion CalculateConsumedStreamVersion(EventStreamConsumerStateMachine stm, bool skipCurrentEvent) { return stm.m_commitedVersion.Increment(stm.m_uncommittedEventCount); } } private class ReceivingCompletedState : State { public override State MoveToReceivingStartedState(EventStreamConsumerStateMachine 
stm) { if (stm.m_uncommittedEventCount == -1) { stm.m_uncommittedEventCount = 0; } stm.m_uncommittedEventCount += stm.m_receivedEventCount; stm.m_receivedEventCount = 0; return Receiving; } public override State MoveToReceivingCompletedState(EventStreamConsumerStateMachine stm, int eventsCount) { throw new InvalidOperationException("Consumer stream is in events received state."); } public override State MoveToConsumingStarted() { return Consuming; } public override State MoveToConsumedState(EventStreamConsumerStateMachine stm) { throw new InvalidOperationException("Consumer stream is in events received state."); } public override State MoveToClosedState(EventStreamConsumerStateMachine stm) { if (stm.m_uncommittedEventCount == -1) { stm.m_uncommittedEventCount = 0; } stm.m_uncommittedEventCount += stm.m_receivedEventCount; return Closed; } public override StreamVersion CalculateConsumedStreamVersion(EventStreamConsumerStateMachine stm, bool skipCurrentEvent) { if (stm.m_uncommittedEventCount == -1) { stm.m_uncommittedEventCount = 0; } return stm.m_commitedVersion.Increment(stm.m_uncommittedEventCount + stm.m_receivedEventCount); } } private class ConsumingState : State { public override State MoveToConsumingStarted() { throw new InvalidOperationException("Consumer stream is in consuming state."); } public override State MoveToConsumedState(EventStreamConsumerStateMachine stm) { stm.m_uncommittedEventCount++; return Consumed; } public override State MoveToClosedState(EventStreamConsumerStateMachine stm) { if (stm.m_uncommittedEventCount == -1) { stm.m_uncommittedEventCount = 0; } return Closed; } public override void EventProcessingStarted(EventStreamConsumerStateMachine stm) { stm.m_uncommittedEventCount++; } public override StreamVersion CalculateConsumedStreamVersion(EventStreamConsumerStateMachine stm, bool skipCurrentEvent) { return stm.m_commitedVersion.Increment( skipCurrentEvent ? 
stm.m_uncommittedEventCount : stm.m_uncommittedEventCount + 1); } } private class ConsumedState : State { public override State MoveToReceivingStartedState(EventStreamConsumerStateMachine stm) { return Receiving; } public override State MoveToReceivingCompletedState(EventStreamConsumerStateMachine stm, int eventsCount) { throw new InvalidOperationException("Consumer stream is in consumed state."); } public override State MoveToClosedState(EventStreamConsumerStateMachine stm) { return Closed; } public override StreamVersion CalculateConsumedStreamVersion(EventStreamConsumerStateMachine stm, bool skipCurrentEvent) { return stm.m_commitedVersion.Increment(stm.m_uncommittedEventCount); } } private class ClosedState : State { public override State MoveToReceivingCompletedState(EventStreamConsumerStateMachine stm, int eventsCount) { throw new InvalidOperationException("Consumer stream is in closed state."); } public override State MoveToConsumingStarted() { throw new InvalidOperationException("Consumer stream is in closed state."); } public override State MoveToClosedState(EventStreamConsumerStateMachine stm) { throw new InvalidOperationException("Consumer stream is in closed state."); } public override StreamVersion CalculateConsumedStreamVersion(EventStreamConsumerStateMachine stm, bool skipCurrentEvent) { return stm.m_commitedVersion.Increment(stm.m_uncommittedEventCount); } } private StreamVersion m_commitedVersion; private State m_state; private int m_uncommittedEventCount = -1; private int m_receivedEventCount; public EventStreamConsumerStateMachine(StreamVersion commitedVersion) { m_commitedVersion = commitedVersion; m_state = State.Initial; } public void ReceivingStarted() { m_state = m_state.MoveToReceivingStartedState(this); } public void ReceivingCompleted(int eventsCount) { m_state = m_state.MoveToReceivingCompletedState(this, eventsCount); } public void ConsumingCompleted() { m_state = m_state.MoveToConsumedState(this); } public void ConsumingStarted() { m_state = m_state.MoveToConsumingStarted(); } public void ConsumerClosed() { m_state = m_state.MoveToClosedState(this); } public void EventProcessingStarted() { m_state.EventProcessingStarted(this); } public bool CommitRequired(bool autoCommitProcessedStreamVersion) { return autoCommitProcessedStreamVersion && (m_state is ConsumedState || m_state is ReceivingStartedState || m_state is ClosedState) && m_uncommittedEventCount > 0; } public StreamVersion CalculateConsumedStreamVersion(bool skipCurrentEvent) { return m_state.CalculateConsumedStreamVersion(this, skipCurrentEvent); } public void ConsumedStreamVersionCommited(StreamVersion version, bool skipCurrent) { m_state.ConsumedStreamVersionCommited(this, version, skipCurrent); } public StreamVersion CommitedStreamVersion => m_commitedVersion; } }
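// --- Usage sketch (not part of the state machine above) ---
// One plausible walk through the lifecycle implied by the state transitions above:
// receive a batch, process each event, complete consumption, then commit the processed
// stream version. The events list and the commit delegate stand in for whatever the
// surrounding consumer actually does; only the state-machine calls come from the code above.
using System;
using System.Collections.Generic;
using Journalist.EventStore.Events;
using Journalist.EventStore.Streams;

public static class ConsumerStateMachineSketch
{
    public static void ConsumeBatch(
        EventStreamConsumerStateMachine stm,
        IReadOnlyList<object> events,
        Action<StreamVersion> commit)
    {
        // Initial -> Receiving -> Received.
        stm.ReceivingStarted();
        stm.ReceivingCompleted(events.Count);

        // Received -> Consuming; each processed event bumps the uncommitted counter.
        stm.ConsumingStarted();
        foreach (var e in events)
        {
            stm.EventProcessingStarted();
            // ... handle the event here ...
        }
        stm.ConsumingCompleted();

        // Commit the consumed version if the state machine says a commit is due.
        if (stm.CommitRequired(autoCommitProcessedStreamVersion: true))
        {
            StreamVersion version = stm.CalculateConsumedStreamVersion(skipCurrentEvent: false);
            commit(version);
            stm.ConsumedStreamVersionCommited(version, skipCurrent: false);
        }

        // Consumed -> Closed.
        stm.ConsumerClosed();
    }
}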
using System; using System.Globalization; using Eto.Drawing; using Eto.Forms; using Eto.GtkSharp.Drawing; using Eto.GtkSharp.Forms; namespace Eto.GtkSharp { public enum GtkStateFlags { Normal, Active, Prelight, Insensitive } public static class GtkConversions { public static Gdk.Color ToGdk(this Color color) { return new Gdk.Color((byte)(color.R * byte.MaxValue), (byte)(color.G * byte.MaxValue), (byte)(color.B * byte.MaxValue)); } public static Cairo.Color ToCairo(this Color color) { return new Cairo.Color((double)color.R, (double)color.G, (double)color.B, (double)color.A); } public static Color ToEto(this Cairo.Color color) { return new Color((float)color.R, (float)color.G, (float)color.B, (float)color.A); } public static Cairo.Rectangle ToCairo(this Rectangle rectangle) { return new Cairo.Rectangle(rectangle.X, rectangle.Y, rectangle.Width, rectangle.Height); } public static Cairo.Rectangle ToCairo(this RectangleF rectangle) { return new Cairo.Rectangle(rectangle.X, rectangle.Y, rectangle.Width, rectangle.Height); } public static Rectangle ToEto(this Cairo.Rectangle rectangle) { return new Rectangle((int)rectangle.X, (int)rectangle.Y, (int)rectangle.Width, (int)rectangle.Height); } public static Cairo.Filter ToCairo(this ImageInterpolation value) { switch (value) { case ImageInterpolation.Default: return Cairo.Filter.Bilinear; case ImageInterpolation.None: return Cairo.Filter.Nearest; case ImageInterpolation.High: return Cairo.Filter.Best; case ImageInterpolation.Low: return Cairo.Filter.Fast; case ImageInterpolation.Medium: return Cairo.Filter.Good; default: throw new NotSupportedException(); } } public static Gdk.InterpType ToGdk(this ImageInterpolation value) { switch (value) { case ImageInterpolation.Default: return Gdk.InterpType.Bilinear; case ImageInterpolation.None: return Gdk.InterpType.Nearest; case ImageInterpolation.High: return Gdk.InterpType.Hyper; case ImageInterpolation.Low: return Gdk.InterpType.Tiles; case ImageInterpolation.Medium: return Gdk.InterpType.Bilinear; default: throw new NotSupportedException(); } } public static Color ToEto(this Gdk.Color color) { return new Color((float)color.Red / ushort.MaxValue, (float)color.Green / ushort.MaxValue, (float)color.Blue / ushort.MaxValue); } public static Gdk.Size ToGdk(this Size size) { return new Gdk.Size(size.Width, size.Height); } public static Size ToEto(this Gdk.Size size) { return new Size(size.Width, size.Height); } public static Size ToEto(this Gtk.Requisition req) { return new Size(req.Width, req.Height); } public static Gdk.Point ToGdk(this Point point) { return new Gdk.Point(point.X, point.Y); } public static Point ToEto(this Gdk.Point point) { return new Point(point.X, point.Y); } public static Gdk.Rectangle ToGdk(this Rectangle rect) { return new Gdk.Rectangle(rect.X, rect.Y, rect.Width, rect.Height); } public static Rectangle ToEto(this Gdk.Rectangle rect) { return new Rectangle(rect.X, rect.Y, rect.Width, rect.Height); } public static DialogResult ToEto(this Gtk.ResponseType result) { switch (result) { case Gtk.ResponseType.None: return DialogResult.None; case Gtk.ResponseType.Reject: return DialogResult.Abort; case Gtk.ResponseType.Accept: return DialogResult.Ignore; case Gtk.ResponseType.Ok: return DialogResult.Ok; case Gtk.ResponseType.Cancel: return DialogResult.Cancel; case Gtk.ResponseType.Yes: return DialogResult.Yes; case Gtk.ResponseType.No: return DialogResult.No; default: return DialogResult.None; } } public static string ToGdk(this ImageFormat format) { switch (format) { case ImageFormat.Jpeg: 
return "jpeg"; case ImageFormat.Bitmap: return "bmp"; case ImageFormat.Gif: return "gif"; case ImageFormat.Tiff: return "tiff"; case ImageFormat.Png: return "png"; default: throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Invalid format specified")); } } public static Gdk.CursorType ToGdk(this CursorType cursor) { switch (cursor) { case CursorType.Arrow: return Gdk.CursorType.Arrow; case CursorType.Crosshair: return Gdk.CursorType.Crosshair; case CursorType.Default: return Gdk.CursorType.Arrow; case CursorType.HorizontalSplit: return Gdk.CursorType.SbVDoubleArrow; case CursorType.VerticalSplit: return Gdk.CursorType.SbHDoubleArrow; case CursorType.IBeam: return Gdk.CursorType.Xterm; case CursorType.Move: return Gdk.CursorType.Fleur; case CursorType.Pointer: return Gdk.CursorType.Hand2; default: throw new NotSupportedException(); } } public static Gtk.ButtonsType ToGtk(this MessageBoxButtons buttons) { switch (buttons) { default: return Gtk.ButtonsType.Ok; case MessageBoxButtons.OKCancel: return Gtk.ButtonsType.OkCancel; case MessageBoxButtons.YesNo: return Gtk.ButtonsType.YesNo; case MessageBoxButtons.YesNoCancel: return Gtk.ButtonsType.YesNo; } } public static Gtk.ResponseType ToGtk(this MessageBoxDefaultButton button, MessageBoxButtons buttons) { switch (button) { case MessageBoxDefaultButton.OK: if (buttons == MessageBoxButtons.YesNo || buttons == MessageBoxButtons.YesNoCancel) return Gtk.ResponseType.Yes; return Gtk.ResponseType.Ok; case MessageBoxDefaultButton.No: return Gtk.ResponseType.No; case MessageBoxDefaultButton.Cancel: return Gtk.ResponseType.Cancel; case MessageBoxDefaultButton.Default: switch (buttons) { case MessageBoxButtons.OK: return Gtk.ResponseType.Ok; case MessageBoxButtons.OKCancel: case MessageBoxButtons.YesNoCancel: return Gtk.ResponseType.Cancel; case MessageBoxButtons.YesNo: return Gtk.ResponseType.No; default: throw new NotSupportedException(); } default: throw new NotSupportedException(); } } public static Gtk.MessageType ToGtk(this MessageBoxType type) { switch (type) { default: return Gtk.MessageType.Info; case MessageBoxType.Error: return Gtk.MessageType.Error; case MessageBoxType.Warning: return Gtk.MessageType.Warning; case MessageBoxType.Question: return Gtk.MessageType.Question; } } public static Gtk.PageOrientation ToGtk(this PageOrientation value) { switch (value) { case PageOrientation.Landscape: return Gtk.PageOrientation.Landscape; case PageOrientation.Portrait: return Gtk.PageOrientation.Portrait; default: throw new NotSupportedException(); } } public static PageOrientation ToEto(this Gtk.PageOrientation value) { switch (value) { case Gtk.PageOrientation.Landscape: return PageOrientation.Landscape; case Gtk.PageOrientation.Portrait: return PageOrientation.Portrait; default: throw new NotSupportedException(); } } public static Gtk.PageRange ToGtkPageRange(this Range<int> range) { return new Gtk.PageRange { Start = range.Start - 1, End = range.End - 1 }; } public static Range<int> ToEto(this Gtk.PageRange range) { return new Range<int>(range.Start + 1, range.End); } public static Gtk.PrintPages ToGtk(this PrintSelection value) { switch (value) { case PrintSelection.AllPages: return Gtk.PrintPages.All; case PrintSelection.SelectedPages: return Gtk.PrintPages.Ranges; default: throw new NotSupportedException(); } } public static PrintSelection ToEto(this Gtk.PrintPages value) { switch (value) { case Gtk.PrintPages.All: return PrintSelection.AllPages; case Gtk.PrintPages.Ranges: return PrintSelection.SelectedPages; default: throw 
new NotSupportedException(); } } public static void Apply(this Pen pen, GraphicsHandler graphics) { ((PenHandler)pen.Handler).Apply(pen, graphics); } public static void Apply(this Brush brush, GraphicsHandler graphics) { ((BrushHandler)brush.Handler).Apply(brush.ControlObject, graphics); } public static Cairo.LineJoin ToCairo(this PenLineJoin value) { switch (value) { case PenLineJoin.Miter: return Cairo.LineJoin.Miter; case PenLineJoin.Bevel: return Cairo.LineJoin.Bevel; case PenLineJoin.Round: return Cairo.LineJoin.Round; default: throw new NotSupportedException(); } } public static PenLineJoin ToEto(this Cairo.LineJoin value) { switch (value) { case Cairo.LineJoin.Bevel: return PenLineJoin.Bevel; case Cairo.LineJoin.Miter: return PenLineJoin.Miter; case Cairo.LineJoin.Round: return PenLineJoin.Round; default: throw new NotSupportedException(); } } public static Cairo.LineCap ToCairo(this PenLineCap value) { switch (value) { case PenLineCap.Butt: return Cairo.LineCap.Butt; case PenLineCap.Round: return Cairo.LineCap.Round; case PenLineCap.Square: return Cairo.LineCap.Square; default: throw new NotSupportedException(); } } public static PenLineCap ToEto(this Cairo.LineCap value) { switch (value) { case Cairo.LineCap.Butt: return PenLineCap.Butt; case Cairo.LineCap.Round: return PenLineCap.Round; case Cairo.LineCap.Square: return PenLineCap.Square; default: throw new NotSupportedException(); } } public static Cairo.PointD ToCairo(this PointF point) { return new Cairo.PointD(point.X, point.Y); } public static PointF ToEto(this Cairo.PointD point) { return new PointF((float)point.X, (float)point.Y); } public static GraphicsPathHandler ToHandler(this IGraphicsPath path) { return ((GraphicsPathHandler)path.ControlObject); } public static void Apply(this IGraphicsPath path, Cairo.Context context) { ((GraphicsPathHandler)path.ControlObject).Apply(context); } public static Cairo.Matrix ToCairo(this IMatrix matrix) { return (Cairo.Matrix)matrix.ControlObject; } public static IMatrix ToEto(this Cairo.Matrix matrix) { return new MatrixHandler(matrix); } public static Gdk.Pixbuf ToGdk(this Image image) { var handler = image.Handler as IGtkPixbuf; return handler != null ? handler.Pixbuf : null; } public static void SetCairoSurface(this Image image, Cairo.Context context, float x, float y) { Gdk.CairoHelper.SetSourcePixbuf(context, image.ToGdk(), x, y); } public static GradientWrapMode ToEto(this Cairo.Extend extend) { switch (extend) { case Cairo.Extend.Reflect: return GradientWrapMode.Reflect; case Cairo.Extend.Repeat: return GradientWrapMode.Repeat; case Cairo.Extend.Pad: return GradientWrapMode.Pad; default: throw new NotSupportedException(); } } public static Cairo.Extend ToCairo(this GradientWrapMode wrap) { switch (wrap) { case GradientWrapMode.Reflect: return Cairo.Extend.Reflect; case GradientWrapMode.Repeat: return Cairo.Extend.Repeat; case GradientWrapMode.Pad: return Cairo.Extend.Pad; default: throw new NotSupportedException(); } } public static Gtk.Image ToGtk(this Image image, Gtk.IconSize? size = null) { if (image == null) return null; var handler = (IImageHandler)image.Handler; var gtkimage = new Gtk.Image(); handler.SetImage(gtkimage, size); return gtkimage; } public static void SetGtkImage(this Image image, Gtk.Image gtkimage, Gtk.IconSize? 
size = null) { if (image == null) return; var handler = (IImageHandler)image.Handler; handler.SetImage(gtkimage, size); } public static Cairo.FillRule ToCairo(this FillMode value) { switch (value) { case FillMode.Alternate: return Cairo.FillRule.EvenOdd; case FillMode.Winding: return Cairo.FillRule.Winding; default: throw new NotSupportedException(); } } public static KeyEventArgs ToEto(this Gdk.EventKey args) { Keys key = args.Key.ToEto() | args.State.ToEtoKey(); if (key != Keys.None) { Keys modifiers = (key & Keys.ModifierMask); if (args.KeyValue <= 128 && ((modifiers & ~Keys.Shift) == 0)) return new KeyEventArgs(key, KeyEventType.KeyDown, (char)args.KeyValue); return new KeyEventArgs(key, KeyEventType.KeyDown); } return args.KeyValue <= 128 ? new KeyEventArgs(key, KeyEventType.KeyDown, (char)args.KeyValue) : null; } public static MouseButtons ToEtoMouseButtons(this Gdk.ModifierType modifiers) { MouseButtons buttons = MouseButtons.None; if (modifiers.HasFlag(Gdk.ModifierType.Button1Mask)) buttons |= MouseButtons.Primary; if (modifiers.HasFlag(Gdk.ModifierType.Button2Mask)) buttons |= MouseButtons.Middle; if (modifiers.HasFlag(Gdk.ModifierType.Button3Mask)) buttons |= MouseButtons.Alternate; return buttons; } public static MouseButtons ToEtoMouseButtons(this Gdk.EventButton ev) { switch (ev.Button) { case 1: return MouseButtons.Primary; case 2: return MouseButtons.Middle; case 3: return MouseButtons.Alternate; default: return MouseButtons.None; } } public static DrawableCellStates ToEto(this Gtk.CellRendererState value) { if (value.HasFlag(Gtk.CellRendererState.Selected)) return DrawableCellStates.Selected; return DrawableCellStates.None; } public static TextAlignment ToEto(this Gtk.Justification justification) { switch (justification) { case Gtk.Justification.Left: return TextAlignment.Left; case Gtk.Justification.Right: return TextAlignment.Right; case Gtk.Justification.Center: return TextAlignment.Center; default: throw new NotSupportedException(); } } public static Gtk.Justification ToGtk(this TextAlignment align) { switch (align) { case TextAlignment.Left: return Gtk.Justification.Left; case TextAlignment.Center: return Gtk.Justification.Center; case TextAlignment.Right: return Gtk.Justification.Right; default: throw new NotSupportedException(); } } public static Pango.FontDescription ToPango(this Font font) { return font == null ? null : ((FontHandler)font.Handler).Control; } public static Pango.FontFamily ToPango(this FontFamily family) { if (family == null) return null; return ((FontFamilyHandler)family.Handler).Control; } public static Font ToEto(this Pango.FontDescription fontDesc, string familyName = null) { return fontDesc == null ? 
null : new Font(new FontHandler(fontDesc, familyName)); } public static Gtk.Window ToGtk(this Window window) { if (window == null) return null; var gtkWindow = window.Handler as IGtkWindow; if (gtkWindow != null) return gtkWindow.Control; return null; } public static Gtk.PositionType ToGtk(this DockPosition position) { switch (position) { case DockPosition.Top: return Gtk.PositionType.Top; case DockPosition.Left: return Gtk.PositionType.Left; case DockPosition.Right: return Gtk.PositionType.Right; case DockPosition.Bottom: return Gtk.PositionType.Bottom; default: throw new NotSupportedException(); } } public static DockPosition ToEto(this Gtk.PositionType position) { switch (position) { case Gtk.PositionType.Left: return DockPosition.Left; case Gtk.PositionType.Right: return DockPosition.Right; case Gtk.PositionType.Top: return DockPosition.Top; case Gtk.PositionType.Bottom: return DockPosition.Bottom; default: throw new NotSupportedException(); } } } }
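// --- Usage sketch (not part of the conversion extensions above) ---
// A small example of the Eto <-> Gdk/Cairo conversion helpers defined in GtkConversions.
// It only calls extension methods that appear above; the concrete colour, rectangle and
// interpolation values are arbitrary.
using Eto.Drawing;
using Eto.GtkSharp;

public static class GtkConversionsSketch
{
    public static void Run()
    {
        // Color: Eto -> Gdk (alpha is dropped, Gdk.Color has no alpha channel) and back.
        var etoColor = new Color(0.25f, 0.5f, 0.75f);
        Gdk.Color gdkColor = etoColor.ToGdk();
        Color roundTripped = gdkColor.ToEto();

        // Rectangle: Eto -> Cairo -> Eto.
        var rect = new Rectangle(10, 20, 300, 200);
        Cairo.Rectangle cairoRect = rect.ToCairo();
        Rectangle back = cairoRect.ToEto();

        // Image interpolation mapping used when scaling pixbufs.
        Gdk.InterpType interp = ImageInterpolation.High.ToGdk();
    }
}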
#region Using Directives using System.Data; using Newtonsoft.Json; using Newtonsoft.Json.Linq; using System; using System.Collections.Generic; #endregion //Using Directives namespace Figlut.MonoDroid.Toolkit.Utilities.Serialization { /// <summary> /// Converts a DataTable to and from JSON. /// </summary> public class DataTableConverter : JsonConverter { /// <summary> /// Determines whether this instance can convert the specified object type. /// </summary> /// <param name="objectType">Type of the object.</param><returns> /// <c>true</c> if this instance can convert the specified object type; otherwise, <c>false</c>. /// </returns> public override bool CanConvert(System.Type objectType) { // True when objectType is a DataTable (or derives from it). return typeof(DataTable).IsAssignableFrom(objectType); } /// <summary> /// Reads the JSON. /// </summary> /// <param name="reader">The reader.</param> /// <param name="objectType">Type of the object.</param> /// <param name="existingValue">The existing value.</param> /// <param name="serializer">The serializer.</param><returns></returns> public override object ReadJson(Newtonsoft.Json.JsonReader reader, System.Type objectType, object existingValue, Newtonsoft.Json.JsonSerializer serializer) { JObject jObject = JObject.Load(reader); DataTable table = new DataTable(); if (jObject["TableName"] != null) { table.TableName = jObject["TableName"].ToString(); } if (jObject["Columns"] == null) return table; // Loop through the columns in the table and apply any properties provided foreach (JObject jColumn in jObject["Columns"]) { DataColumn column = new DataColumn(); JToken token = default(JToken); token = jColumn.SelectToken("AllowDBNull"); if (token != null) { column.AllowDBNull = token.Value<bool>(); } token = jColumn.SelectToken("AutoIncrement"); if (token != null) { column.AutoIncrement = token.Value<bool>(); } token = jColumn.SelectToken("AutoIncrementSeed"); if (token != null) { column.AutoIncrementSeed = token.Value<long>(); } token = jColumn.SelectToken("AutoIncrementStep"); if (token != null) { column.AutoIncrementStep = token.Value<long>(); } token = jColumn.SelectToken("Caption"); if (token != null) { column.Caption = token.Value<string>(); } token = jColumn.SelectToken("ColumnName"); if (token != null) { column.ColumnName = token.Value<string>(); } // Allowed data types: http://msdn.microsoft.com/en-us/library/system.data.datacolumn.datatype.aspx token = jColumn.SelectToken("DataType"); if (token != null) { string dataType = token.Value<string>(); if (dataType == "Byte[]") { column.DataType = typeof(System.Byte[]); } else { // All allowed data types exist in the System namespace column.DataType = Type.GetType(string.Concat("System.", dataType)); } } token = jColumn.SelectToken("DateTimeMode"); if (token != null) { column.DateTimeMode = (DataSetDateTime)Enum.Parse(typeof(System.Data.DataSetDateTime), token.Value<string>()); } // Can't set default value on auto increment column if (!column.AutoIncrement) { token = jColumn.SelectToken("DefaultValue"); if (token != null) { // If a default value is provided then cast to the column's data type if (column.DataType == typeof(System.Boolean)) { bool defaultValue = false; if (bool.TryParse (token.ToString (), out defaultValue)) { column.DefaultValue = defaultValue; } } else if (column.DataType == typeof(System.Byte)) { byte defaultValue1 = 0; if (byte.TryParse (token.ToString (), out defaultValue1)) { column.DefaultValue = defaultValue1; } } else if (column.DataType ==
typeof(System.Char)) { char defaultValue2 = '\0'; if (char.TryParse (token.ToString (), out defaultValue2)) { column.DefaultValue = defaultValue2; } } else if (column.DataType == typeof(System.DateTime)) { DateTime defaultValue3 = default(DateTime); if (DateTime.TryParse (token.ToString (), out defaultValue3)) { column.DefaultValue = defaultValue3; } } else if (column.DataType == typeof(System.Decimal)) { decimal defaultValue4 = default(decimal); if (decimal.TryParse (token.ToString (), out defaultValue4)) { column.DefaultValue = defaultValue4; } } else if (column.DataType == typeof(System.Double)) { double defaultValue5 = 0; if (double.TryParse (token.ToString (), out defaultValue5)) { column.DefaultValue = defaultValue5; } } else if (column.DataType == typeof(System.Guid)) { Guid defaultValue6 = default(Guid); if (Guid.TryParse (token.ToString (), out defaultValue6)) { column.DefaultValue = defaultValue6; } } else if (column.DataType == typeof(System.Int16)) { Int16 defaultValue7 = default(Int16); if (Int16.TryParse (token.ToString (), out defaultValue7)) { column.DefaultValue = defaultValue7; } } else if (column.DataType == typeof(System.Int32)) { Int32 defaultValue8 = default(Int32); if (Int32.TryParse (token.ToString (), out defaultValue8)) { column.DefaultValue = defaultValue8; } } else if (column.DataType == typeof(System.Int64)) { Int64 defaultValue9 = default(Int64); if (Int64.TryParse (token.ToString (), out defaultValue9)) { column.DefaultValue = defaultValue9; } } else if (column.DataType == typeof(System.SByte)) { sbyte defaultValue10 = 0; if (sbyte.TryParse (token.ToString (), out defaultValue10)) { column.DefaultValue = defaultValue10; } } else if (column.DataType == typeof(System.Single)) { float defaultValue11 = 0; if (float.TryParse (token.ToString (), out defaultValue11)) { column.DefaultValue = defaultValue11; } } else if (column.DataType == typeof(System.String)) { column.DefaultValue = token.ToString (); } else if (column.DataType == typeof(System.TimeSpan)) { TimeSpan defaultValue12 = default(TimeSpan); if (TimeSpan.TryParse (token.ToString (), out defaultValue12)) { column.DefaultValue = defaultValue12; } } else if (column.DataType == typeof(System.UInt16)) { UInt16 defaultValue13 = default(UInt16); if (UInt16.TryParse (token.ToString (), out defaultValue13)) { column.DefaultValue = defaultValue13; } } else if (column.DataType == typeof(System.UInt32)) { UInt32 defaultValue14 = default(UInt32); if (UInt32.TryParse (token.ToString (), out defaultValue14)) { column.DefaultValue = defaultValue14; } } else if (column.DataType == typeof(System.UInt64)) { UInt64 defaultValue15 = default(UInt64); if (UInt64.TryParse (token.ToString (), out defaultValue15)) { column.DefaultValue = defaultValue15; } } else if (column.DataType == typeof(System.Byte[])) { } else { } } } token = jColumn.SelectToken("MaxLength"); if (token != null) { column.MaxLength = token.Value<int>(); } token = jColumn.SelectToken("ReadOnly"); if (token != null) { column.ReadOnly = token.Value<bool>(); } token = jColumn.SelectToken("Unique"); if (token != null) { column.Unique = token.Value<bool>(); } table.Columns.Add(column); } // Add the rows to the table if (jObject["Rows"] != null) { foreach (JArray jRow in jObject["Rows"]) { DataRow row = table.NewRow(); // Each row is just an array of objects row.ItemArray = jRow.ToObject<System.Object[]>(); table.Rows.Add(row); } } // Add the primary key to the table if supplied if (jObject["PrimaryKey"] != null) { List<DataColumn> primaryKey = new 
List<DataColumn>(); foreach (JValue jPrimaryKey in jObject["PrimaryKey"]) { DataColumn column = table.Columns[jPrimaryKey.ToString()]; if (column == null) { throw new ApplicationException("Invalid primary key."); } else { primaryKey.Add(column); } } table.PrimaryKey = primaryKey.ToArray(); } return table; } /// <summary> /// Writes the json. /// </summary> /// <param name="writer">The writer.</param> /// <param name="value">The value.</param> /// <param name="serializer">The serializer.</param> public override void WriteJson(Newtonsoft.Json.JsonWriter writer, object value, Newtonsoft.Json.JsonSerializer serializer) { DataTable table = value as DataTable; writer.WriteStartObject(); writer.WritePropertyName("TableName"); writer.WriteValue(table.TableName); writer.WritePropertyName("Columns"); writer.WriteStartArray(); foreach (DataColumn column in table.Columns) { writer.WriteStartObject(); writer.WritePropertyName("AllowDBNull"); writer.WriteValue(column.AllowDBNull); writer.WritePropertyName("AutoIncrement"); writer.WriteValue(column.AutoIncrement); writer.WritePropertyName("AutoIncrementSeed"); writer.WriteValue(column.AutoIncrementSeed); writer.WritePropertyName("AutoIncrementStep"); writer.WriteValue(column.AutoIncrementStep); writer.WritePropertyName("Caption"); writer.WriteValue(column.Caption); writer.WritePropertyName("ColumnName"); writer.WriteValue(column.ColumnName); writer.WritePropertyName("DataType"); writer.WriteValue(column.DataType.Name); writer.WritePropertyName("DateTimeMode"); writer.WriteValue(column.DateTimeMode.ToString()); writer.WritePropertyName("DefaultValue"); writer.WriteValue(column.DefaultValue); writer.WritePropertyName("MaxLength"); writer.WriteValue(column.MaxLength); writer.WritePropertyName("Ordinal"); writer.WriteValue(column.Ordinal); writer.WritePropertyName("ReadOnly"); writer.WriteValue(column.ReadOnly); writer.WritePropertyName("Unique"); writer.WriteValue(column.Unique); writer.WriteEndObject(); } writer.WriteEndArray(); writer.WritePropertyName("Rows"); writer.WriteStartArray(); foreach (DataRow row in table.Rows) { if (row.RowState != DataRowState.Deleted && row.RowState != DataRowState.Detached) { writer.WriteStartArray(); for (int index = 0; index <= table.Columns.Count - 1; index++) { writer.WriteValue(row[index]); } writer.WriteEndArray(); } } writer.WriteEndArray(); // Write out primary key if the table has one. This will be useful when deserializing the table. // We will write it out as an array of column names writer.WritePropertyName("PrimaryKey"); writer.WriteStartArray(); if (table.PrimaryKey.Length > 0) { foreach (DataColumn column in table.PrimaryKey) { writer.WriteValue(column.ColumnName); } } writer.WriteEndArray(); writer.WriteEndObject(); } } }
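// ---------------------------------------------------------------------------------
// Hedged usage sketch (not part of the original file) showing DataTableConverter in
// a round trip through Newtonsoft.Json. The table name, columns and row values are
// invented for illustration; JsonConvert is the standard Json.NET entry point.
// ---------------------------------------------------------------------------------
using System.Data;
using Newtonsoft.Json;
using Figlut.MonoDroid.Toolkit.Utilities.Serialization;

static class DataTableConverterExample
{
    static void RoundTrip()
    {
        var table = new DataTable("People");
        table.Columns.Add("Id", typeof(int));
        table.Columns.Add("Name", typeof(string));
        table.Rows.Add(1, "Ada");

        // Serialize with the converter above, then read the JSON back into a new table.
        string json = JsonConvert.SerializeObject(table, new DataTableConverter());
        DataTable copy = JsonConvert.DeserializeObject<DataTable>(json, new DataTableConverter());
        System.Console.WriteLine(copy.Rows.Count); // 1
    }
}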
//Copyright (C) 2006 Richard J. Northedge // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. //This file is based on the ProperNounResolver.java source file found in the //original java implementation of OpenNLP. That source file contains the following header: //Copyright (C) 2003 Thomas Morton // //This library is free software; you can redistribute it and/or //modify it under the terms of the GNU Lesser General Public //License as published by the Free Software Foundation; either //version 2.1 of the License, or (at your option) any later version. // //This library is distributed in the hope that it will be useful, //but WITHOUT ANY WARRANTY; without even the implied warranty of //MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //GNU Lesser General Public License for more details. // //You should have received a copy of the GNU Lesser General Public //License along with this program; if not, write to the Free Software //Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. using System; //UPGRADE_TODO: The type 'java.util.regex.Pattern' could not be found. If it was not included in the conversion, there may be compiler issues. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1262'" using System.Collections; using System.IO; using System.Text; using System.Text.RegularExpressions; using MentionContext = OpenNLP.Tools.Coreference.Mention.MentionContext; using System.Collections.Generic; namespace OpenNLP.Tools.Coreference.Resolver { /// <summary> Resolves coreference between proper nouns.</summary> public class ProperNounResolver:MaximumEntropyResolver { //UPGRADE_NOTE: Final was removed from the declaration of 'initialCaps '. 
"ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'" private static readonly Regex InitialCaps = new Regex("^[A-Z]", RegexOptions.Compiled); private static IDictionary _acroMap; private static bool _acroMapLoaded = false; public ProperNounResolver(string projectName, ResolverMode mode):base(projectName, "pnmodel", mode, 500) { if (!_acroMapLoaded) { initAcronyms(projectName + "/acronyms"); _acroMapLoaded = true; } ShowExclusions = false; } public ProperNounResolver(string projectName, ResolverMode mode, INonReferentialResolver nonReferentialResolver):base(projectName, "pnmodel", mode, 500, nonReferentialResolver) { if (!_acroMapLoaded) { initAcronyms(projectName + "/acronyms"); _acroMapLoaded = true; } ShowExclusions = false; } public override bool CanResolve(MentionContext mention) { return (PartsOfSpeech.IsProperNoun(mention.HeadTokenTag) || mention.HeadTokenTag.StartsWith(PartsOfSpeech.CardinalNumber)); } private void initAcronyms(string name) { //UPGRADE_TODO: Class 'java.util.HashMap' was converted to 'System.Collections.Hashtable' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javautilHashMap'" _acroMap = new Hashtable(15000); try { StreamReader str; //if (MaxentResolver.loadAsResource()) //{ //UPGRADE_TODO: The differences in the expected value of parameters for constructor 'java.io.BufferedReader.BufferedReader' may cause compilation errors. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1092'" //UPGRADE_WARNING: At least one expression was used more than once in the target code. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1181'" //UPGRADE_ISSUE: Method 'java.lang.Class.getResourceAsStream' was not converted. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1000_javalangClassgetResourceAsStream_javalangString'" //str = new System.IO.StreamReader(new System.IO.StreamReader(this.GetType().getResourceAsStream(name), System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(this.GetType().getResourceAsStream(name), System.Text.Encoding.Default).CurrentEncoding); //} //else //{ //UPGRADE_TODO: The differences in the expected value of parameters for constructor 'java.io.BufferedReader.BufferedReader' may cause compilation errors. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1092'" //UPGRADE_WARNING: At least one expression was used more than once in the target code. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1181'" //UPGRADE_TODO: Constructor 'java.io.FileReader.FileReader' was converted to 'System.IO.StreamReader' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073'" using (var fileStream = new FileStream(name, FileMode.Open)){ using (var reader = new StreamReader(fileStream, Encoding.GetEncoding(0))){ string line; while (null != (line = reader.ReadLine())) { var st = new Util.StringTokenizer(line, "\t"); string acro = st.NextToken(); string full = st.NextToken(); var exSet = (Util.Set<string>)_acroMap[acro]; if (exSet == null) { //UPGRADE_TODO: Class 'java.util.HashSet' was converted to 'SupportClass.HashSetSupport' which has a different behavior. 
"ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javautilHashSet'" exSet = new Util.HashSet<string>(); _acroMap[acro] = exSet; } exSet.Add(full); exSet = (Util.Set<string>)_acroMap[full]; if (exSet == null) { //UPGRADE_TODO: Class 'java.util.HashSet' was converted to 'SupportClass.HashSetSupport' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javautilHashSet'" exSet = new Util.HashSet<string>(); _acroMap[full] = exSet; } exSet.Add(acro); } } } } catch (IOException e) { //UPGRADE_TODO: The equivalent in .NET for method 'java.lang.Throwable.ToString' may return a different value. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1043'" Console.Error.WriteLine("ProperNounResolver.initAcronyms: Acronym Database not found: " + e); } } private MentionContext getProperNounExtent(DiscourseEntity de) { foreach (MentionContext xec in de.Mentions) { //use first extent which is propername string xecHeadTag = xec.HeadTokenTag; if (PartsOfSpeech.IsProperNoun(xecHeadTag) || InitialCaps.IsMatch(xec.HeadTokenText)) { return xec; } } return null; } private bool isAcronym(string ecStrip, string xecStrip) { var exSet = (Util.Set<string>) _acroMap[ecStrip]; if (exSet != null && exSet.Contains(xecStrip)) { return true; } return false; } protected internal virtual List<string> getAcronymFeatures(MentionContext mention, DiscourseEntity entity) { MentionContext xec = getProperNounExtent(entity); string ecStrip = StripNounPhrase(mention); string xecStrip = StripNounPhrase(xec); if (ecStrip != null && xecStrip != null) { if (isAcronym(ecStrip, xecStrip)) { var features = new List<string>(1) {"knownAcronym"}; return features; } } return new List<string>(); } protected internal override List<string> GetFeatures(MentionContext mention, DiscourseEntity entity) { List<string> features = base.GetFeatures(mention, entity); if (entity != null) { features.AddRange(GetStringMatchFeatures(mention, entity)); features.AddRange(getAcronymFeatures(mention, entity)); } return features; } protected internal override bool IsExcluded(MentionContext mention, DiscourseEntity entity) { if (base.IsExcluded(mention, entity)) { return true; } foreach (MentionContext xec in entity.Mentions) { if (PartsOfSpeech.IsProperNoun(xec.HeadTokenTag)) { // || initialCaps.matcher(xec.headToken.ToString()).find()) { return false; } } return true; } } }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Immutable; using System.Diagnostics; using System.Globalization; using System.IO; using System.Linq; using System.Reflection; using System.Runtime.InteropServices; using System.Text; using System.Threading; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CSharp; using Microsoft.CodeAnalysis.CSharp.Test.Utilities; using Microsoft.CodeAnalysis.Editor.CSharp.Interactive; using Microsoft.CodeAnalysis.Interactive; using Microsoft.CodeAnalysis.Test.Utilities; using Roslyn.Test.Utilities; using Roslyn.Utilities; using Xunit; using Traits = Roslyn.Test.Utilities.Traits; namespace Microsoft.CodeAnalysis.UnitTests.Interactive { [Trait(Traits.Feature, Traits.Features.InteractiveHost)] public sealed class InteractiveHostTests : AbstractInteractiveHostTests { #region Utils private SynchronizedStringWriter _synchronizedOutput; private SynchronizedStringWriter _synchronizedErrorOutput; private int[] _outputReadPosition = new int[] { 0, 0 }; private readonly InteractiveHost _host; private static readonly string s_fxDir = FileUtilities.NormalizeDirectoryPath(RuntimeEnvironment.GetRuntimeDirectory()); private static readonly string s_homeDir = FileUtilities.NormalizeDirectoryPath(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile)); public InteractiveHostTests() { _host = new InteractiveHost(typeof(CSharpReplServiceProvider), GetInteractiveHostPath(), ".", millisecondsTimeout: -1); RedirectOutput(); _host.ResetAsync(new InteractiveHostOptions(initializationFile: null, culture: CultureInfo.InvariantCulture)).Wait(); var remoteService = _host.TryGetService(); Assert.NotNull(remoteService); _host.SetPathsAsync(new[] { s_fxDir }, new[] { s_homeDir }, s_homeDir).Wait(); // assert and remove logo: var output = SplitLines(ReadOutputToEnd()); var errorOutput = ReadErrorOutputToEnd(); Assert.Equal("", errorOutput); Assert.Equal(2, output.Length); Assert.Equal("Microsoft (R) Roslyn C# Compiler version " + FileVersionInfo.GetVersionInfo(_host.GetType().Assembly.Location).FileVersion, output[0]); // "Type "#help" for more information." Assert.Equal(FeaturesResources.TypeHelpForMoreInformation, output[1]); // remove logo: ClearOutput(); } public override void Dispose() { try { Process process = _host.TryGetProcess(); DisposeInteractiveHostProcess(_host); // the process should be terminated if (process != null && !process.HasExited) { process.WaitForExit(); } } finally { // Dispose temp files only after the InteractiveHost exits, // so that assemblies are unloaded. 
base.Dispose(); } } private void RedirectOutput() { _synchronizedOutput = new SynchronizedStringWriter(); _synchronizedErrorOutput = new SynchronizedStringWriter(); ClearOutput(); _host.Output = _synchronizedOutput; _host.ErrorOutput = _synchronizedErrorOutput; } private bool LoadReference(string reference) { return Execute($"#r \"{reference}\""); } private bool Execute(string code) { var task = _host.ExecuteAsync(code); task.Wait(); return task.Result.Success; } private bool IsShadowCopy(string path) { return _host.TryGetService().IsShadowCopy(path); } public string ReadErrorOutputToEnd() { return ReadOutputToEnd(isError: true); } public void ClearOutput() { _outputReadPosition = new int[] { 0, 0 }; _synchronizedOutput.Clear(); _synchronizedErrorOutput.Clear(); } public void RestartHost(string rspFile = null) { ClearOutput(); var initTask = _host.ResetAsync(new InteractiveHostOptions(initializationFile: rspFile, culture: CultureInfo.InvariantCulture)); initTask.Wait(); } public string ReadOutputToEnd(bool isError = false) { var writer = isError ? _synchronizedErrorOutput : _synchronizedOutput; var markPrefix = '\uFFFF'; var mark = markPrefix + Guid.NewGuid().ToString(); // writes mark to the STDOUT/STDERR pipe in the remote process: _host.TryGetService().RemoteConsoleWrite(Encoding.UTF8.GetBytes(mark), isError); while (true) { var data = writer.Prefix(mark, ref _outputReadPosition[isError ? 0 : 1]); if (data != null) { return data; } Thread.Sleep(10); } } private class CompiledFile { public string Path; public ImmutableArray<byte> Image; } private static CompiledFile CompileLibrary(TempDirectory dir, string fileName, string assemblyName, string source, params MetadataReference[] references) { var file = dir.CreateFile(fileName); var compilation = CreateCompilation( new[] { source }, assemblyName: assemblyName, references: references.Concat(new[] { MetadataReference.CreateFromAssemblyInternal(typeof(object).Assembly) }), options: fileName.EndsWith(".exe", StringComparison.OrdinalIgnoreCase) ? 
TestOptions.ReleaseExe : TestOptions.ReleaseDll); var image = compilation.EmitToArray(); file.WriteAllBytes(image); return new CompiledFile { Path = file.Path, Image = image }; } #endregion [Fact] public void OutputRedirection() { Execute(@" System.Console.WriteLine(""hello-\u4567!""); System.Console.Error.WriteLine(""error-\u7890!""); 1+1 "); var output = ReadOutputToEnd(); var error = ReadErrorOutputToEnd(); Assert.Equal("hello-\u4567!\r\n2\r\n", output); Assert.Equal("error-\u7890!\r\n", error); } [Fact] public void OutputRedirection2() { Execute(@"System.Console.WriteLine(1);"); Execute(@"System.Console.Error.WriteLine(2);"); var output = ReadOutputToEnd(); var error = ReadErrorOutputToEnd(); Assert.Equal("1\r\n", output); Assert.Equal("2\r\n", error); RedirectOutput(); Execute(@"System.Console.WriteLine(3);"); Execute(@"System.Console.Error.WriteLine(4);"); output = ReadOutputToEnd(); error = ReadErrorOutputToEnd(); Assert.Equal("3\r\n", output); Assert.Equal("4\r\n", error); } [Fact] public void StackOverflow() { // Windows Server 2008 (OS v6.0), Vista (OS v6.0) and XP (OS v5.1) ignores SetErrorMode and shows crash dialog, which would hang the test: if (Environment.OSVersion.Version < new Version(6, 1, 0, 0)) { return; } Execute(@" int foo(int a0, int a1, int a2, int a3, int a4, int a5, int a6, int a7, int a8, int a9) { return foo(0,1,2,3,4,5,6,7,8,9) + foo(0,1,2,3,4,5,6,7,8,9); } foo(0,1,2,3,4,5,6,7,8,9) "); Assert.Equal("", ReadOutputToEnd()); // Hosting process exited with exit code -1073741571. Assert.Equal("Process is terminated due to StackOverflowException.\n" + string.Format(FeaturesResources.HostingProcessExitedWithExitCode, -1073741571), ReadErrorOutputToEnd().Trim()); Execute(@"1+1"); Assert.Equal("2\r\n", ReadOutputToEnd().ToString()); } private const string MethodWithInfiniteLoop = @" void foo() { int i = 0; while (true) { if (i < 10) { i = i + 1; } else if (i == 10) { System.Console.Error.WriteLine(""in the loop""); i = i + 1; } } } "; [Fact] public void AsyncExecute_InfiniteLoop() { var mayTerminate = new ManualResetEvent(false); _host.ErrorOutputReceived += (_, __) => mayTerminate.Set(); var executeTask = _host.ExecuteAsync(MethodWithInfiniteLoop + "\r\nfoo()"); Assert.True(mayTerminate.WaitOne()); RestartHost(); executeTask.Wait(); Assert.True(Execute(@"1+1")); Assert.Equal("2\r\n", ReadOutputToEnd()); } [Fact(Skip = "529027")] public void AsyncExecute_HangingForegroundThreads() { var mayTerminate = new ManualResetEvent(false); _host.OutputReceived += (_, __) => { mayTerminate.Set(); }; var executeTask = _host.ExecuteAsync(@" using System.Threading; int i1 = 0; Thread t1 = new Thread(() => { while(true) { i1++; } }); t1.Name = ""TestThread-1""; t1.IsBackground = false; t1.Start(); int i2 = 0; Thread t2 = new Thread(() => { while(true) { i2++; } }); t2.Name = ""TestThread-2""; t2.IsBackground = true; t2.Start(); Thread t3 = new Thread(() => Thread.Sleep(Timeout.Infinite)); t3.Name = ""TestThread-3""; t3.Start(); while (i1 < 2 || i2 < 2 || t3.ThreadState != System.Threading.ThreadState.WaitSleepJoin) { } System.Console.WriteLine(""terminate!""); while(true) {} "); Assert.Equal("", ReadErrorOutputToEnd()); Assert.True(mayTerminate.WaitOne()); var service = _host.TryGetService(); Assert.NotNull(service); var process = _host.TryGetProcess(); Assert.NotNull(process); service.EmulateClientExit(); // the process should terminate with exit code 0: process.WaitForExit(); Assert.Equal(0, process.ExitCode); } [Fact] public void AsyncExecuteFile_InfiniteLoop() { var file = 
Temp.CreateFile().WriteAllText(MethodWithInfiniteLoop + "\r\nfoo();").Path; var mayTerminate = new ManualResetEvent(false); _host.ErrorOutputReceived += (_, __) => mayTerminate.Set(); var executeTask = _host.ExecuteFileAsync(file); mayTerminate.WaitOne(); RestartHost(); executeTask.Wait(); Assert.True(Execute(@"1+1")); Assert.Equal("2\r\n", ReadOutputToEnd()); } [Fact] public void AsyncExecuteFile_SourceKind() { var file = Temp.CreateFile().WriteAllText("1 1").Path; var task = _host.ExecuteFileAsync(file); task.Wait(); Assert.False(task.Result.Success); var errorOut = ReadErrorOutputToEnd().Trim(); Assert.True(errorOut.StartsWith(file + "(1,3):", StringComparison.Ordinal), "Error output should start with file name, line and column"); Assert.True(errorOut.Contains("CS1002"), "Error output should include error CS1002"); } [Fact] public void AsyncExecuteFile_NonExistingFile() { var task = _host.ExecuteFileAsync("non existing file"); task.Wait(); Assert.False(task.Result.Success); var errorOut = ReadErrorOutputToEnd().Trim(); Assert.Contains(FeaturesResources.SpecifiedFileNotFound, errorOut, StringComparison.Ordinal); Assert.Contains(FeaturesResources.SearchedInDirectory, errorOut, StringComparison.Ordinal); } [Fact] public void AsyncExecuteFile() { var file = Temp.CreateFile().WriteAllText(@" using static System.Console; public class C { public int field = 4; public int Foo(int i) { return i; } } public int Foo(int i) { return i; } WriteLine(5); ").Path; var task = _host.ExecuteFileAsync(file); task.Wait(); Assert.True(task.Result.Success); Assert.Equal("5", ReadOutputToEnd().Trim()); Execute("Foo(2)"); Assert.Equal("2", ReadOutputToEnd().Trim()); Execute("new C().Foo(3)"); Assert.Equal("3", ReadOutputToEnd().Trim()); Execute("new C().field"); Assert.Equal("4", ReadOutputToEnd().Trim()); } [Fact] public void AsyncExecuteFile_InvalidFileContent() { var executeTask = _host.ExecuteFileAsync(typeof(Process).Assembly.Location); executeTask.Wait(); var errorOut = ReadErrorOutputToEnd().Trim(); Assert.True(errorOut.StartsWith(typeof(Process).Assembly.Location + "(1,3):", StringComparison.Ordinal), "Error output should start with file name, line and column"); Assert.True(errorOut.Contains("CS1056"), "Error output should include error CS1056"); Assert.True(errorOut.Contains("CS1002"), "Error output should include error CS1002"); } [Fact] public void AsyncExecuteFile_ScriptFileWithBuildErrors() { var file = Temp.CreateFile().WriteAllText("#load blah.csx" + "\r\n" + "class C {}"); _host.ExecuteFileAsync(file.Path).Wait(); var errorOut = ReadErrorOutputToEnd().Trim(); Assert.True(errorOut.StartsWith(file.Path + "(1,7):", StringComparison.Ordinal), "Error output should start with file name, line and column"); Assert.True(errorOut.Contains("CS7010"), "Error output should include error CS7010"); } /// <summary> /// Check that the assembly resolve event doesn't cause any harm. It shouldn't actually be /// even invoked since we resolve the assembly via Fusion. 
/// </summary> [Fact(Skip = "987032")] public void UserDefinedAssemblyResolve_InfiniteLoop() { var mayTerminate = new ManualResetEvent(false); _host.ErrorOutputReceived += (_, __) => mayTerminate.Set(); _host.TryGetService().HookMaliciousAssemblyResolve(); var executeTask = _host.AddReferenceAsync("nonexistingassembly" + Guid.NewGuid()); Assert.True(mayTerminate.WaitOne()); executeTask.Wait(); Assert.True(Execute(@"1+1")); var output = ReadOutputToEnd(); Assert.Equal("2\r\n", output); } [Fact] public void AddReference_Path() { Assert.False(Execute("new System.Data.DataSet()")); Assert.True(LoadReference(Assembly.Load(new AssemblyName("System.Data, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")).Location)); Assert.True(Execute("new System.Data.DataSet()")); } [Fact] public void AddReference_PartialName() { Assert.False(Execute("new System.Data.DataSet()")); Assert.True(LoadReference("System.Data")); Assert.True(Execute("new System.Data.DataSet()")); } [Fact] public void AddReference_PartialName_LatestVersion() { // there might be two versions of System.Data - v2 and v4, we should get the latter: Assert.True(LoadReference("System.Data")); Assert.True(LoadReference("System")); Assert.True(LoadReference("System.Xml")); Execute(@"new System.Data.DataSet().GetType().Assembly.GetName().Version"); var output = ReadOutputToEnd(); Assert.Equal("[4.0.0.0]\r\n", output); } [Fact] public void AddReference_FullName() { Assert.False(Execute("new System.Data.DataSet()")); Assert.True(LoadReference("System.Data, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")); Assert.True(Execute("new System.Data.DataSet()")); } [ConditionalFact(typeof(Framework35Installed), Skip = "https://github.com/dotnet/roslyn/issues/5167")] public void AddReference_VersionUnification1() { // V3.5 unifies with the current Framework version: var result = LoadReference("System.Core, Version=3.5.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089"); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("", ReadOutputToEnd().Trim()); Assert.True(result); result = LoadReference("System.Core, Version=3.5.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089"); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("", ReadOutputToEnd().Trim()); Assert.True(result); result = LoadReference("System.Core"); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("", ReadOutputToEnd().Trim()); Assert.True(result); } [Fact] public void AddReference_AssemblyAlreadyLoaded() { var result = LoadReference("System.Core"); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("", ReadOutputToEnd().Trim()); Assert.True(result); result = LoadReference("System.Core.dll"); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("", ReadOutputToEnd().Trim()); Assert.True(result); } // Caused by submission not inheriting references. [Fact(Skip = "101161")] public void AddReference_ShadowCopy() { var dir = Temp.CreateDirectory(); // create C.dll var c = CompileLibrary(dir, "c.dll", "c", @"public class C { }"); // load C.dll: Assert.True(LoadReference(c.Path)); Assert.True(Execute("new C()")); Assert.Equal("C { }", ReadOutputToEnd().Trim()); // rewrite C.dll: File.WriteAllBytes(c.Path, new byte[] { 1, 2, 3 }); // we can still run code: var result = Execute("new C()"); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("C { }", ReadOutputToEnd().Trim()); Assert.True(result); } #if TODO /// <summary> /// Tests that a dependency is correctly resolved and loaded at runtime. 
/// A depends on B, which depends on C. When CallB is jitted B is loaded. When CallC is jitted C is loaded. /// </summary> [Fact(Skip = "https://github.com/dotnet/roslyn/issues/860")] public void AddReference_Dependencies() { var dir = Temp.CreateDirectory(); var c = CompileLibrary(dir, "c.dll", "c", @"public class C { }"); var b = CompileLibrary(dir, "b.dll", "b", @"public class B { public static int CallC() { new C(); return 1; } }", MetadataReference.CreateFromImage(c.Image)); var a = CompileLibrary(dir, "a.dll", "a", @"public class A { public static int CallB() { B.CallC(); return 1; } }", MetadataReference.CreateFromImage(b.Image)); AssemblyLoadResult result; result = LoadReference(a.Path); Assert.Equal(a.Path, result.OriginalPath); Assert.True(IsShadowCopy(result.Path)); Assert.True(result.IsSuccessful); Assert.True(Execute("A.CallB()")); // c.dll is loaded as a dependency, so #r should be successful: result = LoadReference(c.Path); Assert.Equal(c.Path, result.OriginalPath); Assert.True(IsShadowCopy(result.Path)); Assert.True(result.IsSuccessful); // c.dll was already loaded explicitly via #r so we should fail now: result = LoadReference(c.Path); Assert.False(result.IsSuccessful); Assert.Equal(c.Path, result.OriginalPath); Assert.True(IsShadowCopy(result.Path)); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("1", ReadOutputToEnd().Trim()); } #endif /// <summary> /// When two files of the same version are in the same directory, prefer .dll over .exe. /// </summary> [Fact] public void AddReference_Dependencies_DllExe() { var dir = Temp.CreateDirectory(); var dll = CompileLibrary(dir, "c.dll", "C", @"public class C { public static int Main() { return 1; } }"); var exe = CompileLibrary(dir, "c.exe", "C", @"public class C { public static int Main() { return 2; } }"); var main = CompileLibrary(dir, "main.exe", "Main", @"public class Program { public static int Main() { return C.Main(); } }", MetadataReference.CreateFromImage(dll.Image)); Assert.True(LoadReference(main.Path)); Assert.True(Execute("Program.Main()")); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("1", ReadOutputToEnd().Trim()); } [Fact] public void AddReference_Dependencies_Versions() { var dir1 = Temp.CreateDirectory(); var dir2 = Temp.CreateDirectory(); var dir3 = Temp.CreateDirectory(); // [assembly:AssemblyVersion("1.0.0.0")] public class C { public static int Main() { return 1; } }"); var file1 = dir1.CreateFile("c.dll").WriteAllBytes(TestResources.General.C1); // [assembly:AssemblyVersion("2.0.0.0")] public class C { public static int Main() { return 2; } }"); var file2 = dir2.CreateFile("c.dll").WriteAllBytes(TestResources.General.C2); Assert.True(LoadReference(file1.Path)); Assert.True(LoadReference(file2.Path)); var main = CompileLibrary(dir3, "main.exe", "Main", @"public class Program { public static int Main() { return C.Main(); } }", MetadataReference.CreateFromImage(TestResources.General.C2.AsImmutableOrNull())); Assert.True(LoadReference(main.Path)); Assert.True(Execute("Program.Main()")); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("2", ReadOutputToEnd().Trim()); } [Fact] public void AddReference_AlreadyLoadedDependencies() { var dir = Temp.CreateDirectory(); var lib1 = CompileLibrary(dir, "lib1.dll", "lib1", @"public interface I { int M(); }"); var lib2 = CompileLibrary(dir, "lib2.dll", "lib2", @"public class C : I { public int M() { return 1; } }", MetadataReference.CreateFromFile(lib1.Path)); Execute("#r \"" + lib1.Path + "\""); Execute("#r \"" + lib2.Path + 
"\""); Execute("new C().M()"); Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal("1", ReadOutputToEnd().Trim()); } [Fact(Skip = "101161")] public void AddReference_LoadUpdatedReference() { var dir = Temp.CreateDirectory(); var source1 = "public class C { public int X = 1; }"; var c1 = CreateCompilationWithMscorlib(source1, assemblyName: "C"); var file = dir.CreateFile("c.dll").WriteAllBytes(c1.EmitToArray()); // use: Execute($@" #r ""{file.Path}"" C foo() => new C(); new C().X "); // update: var source2 = "public class D { public int Y = 2; }"; var c2 = CreateCompilationWithMscorlib(source2, assemblyName: "C"); file.WriteAllBytes(c2.EmitToArray()); // add the reference again: Execute($@" #r ""{file.Path}"" new D().Y "); // TODO: We should report an error that assembly named 'a' was already loaded with different content. // In future we can let it load and improve error reporting around type conversions. Assert.Equal("", ReadErrorOutputToEnd().Trim()); Assert.Equal( @"1 2", ReadOutputToEnd().Trim()); } [Fact(Skip = "129388")] public void AddReference_MultipleReferencesWithSameWeakIdentity() { var dir = Temp.CreateDirectory(); var dir1 = dir.CreateDirectory("1"); var dir2 = dir.CreateDirectory("2"); var source1 = "public class C1 { }"; var c1 = CreateCompilationWithMscorlib(source1, assemblyName: "C"); var file1 = dir1.CreateFile("c.dll").WriteAllBytes(c1.EmitToArray()); var source2 = "public class C2 { }"; var c2 = CreateCompilationWithMscorlib(source2, assemblyName: "C"); var file2 = dir2.CreateFile("c.dll").WriteAllBytes(c2.EmitToArray()); Execute($@" #r ""{file1.Path}"" #r ""{file2.Path}"" "); Execute("new C1()"); Execute("new C2()"); // TODO: We should report an error that assembly named 'c' was already loaded with different content. // In future we can let it load and let the compiler report the error CS1704: "An assembly with the same simple name 'C' has already been imported". Assert.Equal( @"(2,1): error CS1704: An assembly with the same simple name 'C' has already been imported. Try removing one of the references (e.g. '" + file1.Path + @"') or sign them to enable side-by-side. (1,5): error CS0246: The type or namespace name 'C1' could not be found (are you missing a using directive or an assembly reference?) (1,5): error CS0246: The type or namespace name 'C2' could not be found (are you missing a using directive or an assembly reference?)", ReadErrorOutputToEnd().Trim()); Assert.Equal("", ReadOutputToEnd().Trim()); } [Fact(Skip = "129388")] public void AddReference_MultipleReferencesWeakVersioning() { var dir = Temp.CreateDirectory(); var dir1 = dir.CreateDirectory("1"); var dir2 = dir.CreateDirectory("2"); var source1 = @"[assembly: System.Reflection.AssemblyVersion(""1.0.0.0"")] public class C1 { }"; var c1 = CreateCompilationWithMscorlib(source1, assemblyName: "C"); var file1 = dir1.CreateFile("c.dll").WriteAllBytes(c1.EmitToArray()); var source2 = @"[assembly: System.Reflection.AssemblyVersion(""2.0.0.0"")] public class C2 { }"; var c2 = CreateCompilationWithMscorlib(source2, assemblyName: "C"); var file2 = dir2.CreateFile("c.dll").WriteAllBytes(c2.EmitToArray()); Execute($@" #r ""{file1.Path}"" #r ""{file2.Path}"" "); Execute("new C1()"); Execute("new C2()"); // TODO: We should report an error that assembly named 'c' was already loaded with different content. // In future we can let it load and improve error reporting around type conversions. 
Assert.Equal("TODO: error", ReadErrorOutputToEnd().Trim()); Assert.Equal("", ReadOutputToEnd().Trim()); } //// TODO (987032): //// [Fact] //// public void AsyncInitializeContextWithDotNETLibraries() //// { //// var rspFile = Temp.CreateFile(); //// var rspDisplay = Path.GetFileName(rspFile.Path); //// var initScript = Temp.CreateFile(); //// rspFile.WriteAllText(@" /////r:System.Core ////""" + initScript.Path + @""" ////"); //// initScript.WriteAllText(@" ////using static System.Console; ////using System.Linq.Expressions; ////WriteLine(Expression.Constant(123)); ////"); //// // override default "is restarting" behavior (the REPL is already initialized): //// var task = Host.InitializeContextAsync(rspFile.Path, isRestarting: false, killProcess: true); //// task.Wait(); //// var output = SplitLines(ReadOutputToEnd()); //// var errorOutput = ReadErrorOutputToEnd(); //// Assert.Equal(4, output.Length); //// Assert.Equal("Microsoft (R) Roslyn C# Compiler version " + FileVersionInfo.GetVersionInfo(typeof(Compilation).Assembly.Location).FileVersion, output[0]); //// Assert.Equal("Loading context from '" + rspDisplay + "'.", output[1]); //// Assert.Equal("Type \"#help\" for more information.", output[2]); //// Assert.Equal("123", output[3]); //// Assert.Equal("", errorOutput); //// Host.InitializeContextAsync(rspFile.Path).Wait(); //// output = SplitLines(ReadOutputToEnd()); //// errorOutput = ReadErrorOutputToEnd(); //// Assert.True(2 == output.Length, "Output is: '" + string.Join("<NewLine>", output) + "'. Expecting 2 lines."); //// Assert.Equal("Loading context from '" + rspDisplay + "'.", output[0]); //// Assert.Equal("123", output[1]); //// Assert.Equal("", errorOutput); //// } //// [Fact] //// public void AsyncInitializeContextWithBothUserDefinedAndDotNETLibraries() //// { //// var dir = Temp.CreateDirectory(); //// var rspFile = Temp.CreateFile(); //// var initScript = Temp.CreateFile(); //// var dll = CompileLibrary(dir, "c.dll", "C", @"public class C { public static int Main() { return 1; } }"); //// rspFile.WriteAllText(@" /////r:System.Numerics /////r:" + dll.Path + @" ////""" + initScript.Path + @""" ////"); //// initScript.WriteAllText(@" ////using static System.Console; ////using System.Numerics; ////WriteLine(new Complex(12, 6).Real + C.Main()); ////"); //// // override default "is restarting" behavior (the REPL is already initialized): //// var task = Host.InitializeContextAsync(rspFile.Path, isRestarting: false, killProcess: true); //// task.Wait(); //// var errorOutput = ReadErrorOutputToEnd(); //// Assert.Equal("", errorOutput); //// var output = SplitLines(ReadOutputToEnd()); //// Assert.Equal(4, output.Length); //// Assert.Equal("Microsoft (R) Roslyn C# Compiler version " + FileVersionInfo.GetVersionInfo(Host.GetType().Assembly.Location).FileVersion, output[0]); //// Assert.Equal("Loading context from '" + Path.GetFileName(rspFile.Path) + "'.", output[1]); //// Assert.Equal("Type \"#help\" for more information.", output[2]); //// Assert.Equal("13", output[3]); //// } [Fact] public void ReferencePaths() { var directory = Temp.CreateDirectory(); var assemblyName = GetUniqueName(); CompileLibrary(directory, assemblyName + ".dll", assemblyName, @"public class C { }"); var rspFile = Temp.CreateFile(); rspFile.WriteAllText("/lib:" + directory.Path); _host.ResetAsync(new InteractiveHostOptions(initializationFile: rspFile.Path, culture: CultureInfo.InvariantCulture)).Wait(); Execute( $@"#r ""{assemblyName}.dll"" typeof(C).Assembly.GetName()"); Assert.Equal("", ReadErrorOutputToEnd()); 
var output = SplitLines(ReadOutputToEnd()); Assert.Equal(2, output.Length); Assert.Equal("Loading context from '" + Path.GetFileName(rspFile.Path) + "'.", output[0]); Assert.Equal($"[{assemblyName}, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null]", output[1]); } [Fact] public void DefaultUsings() { var rspFile = Temp.CreateFile(); rspFile.WriteAllText(@" /r:System /r:System.Core /r:Microsoft.CSharp /u:System /u:System.IO /u:System.Collections.Generic /u:System.Diagnostics /u:System.Dynamic /u:System.Linq /u:System.Linq.Expressions /u:System.Text /u:System.Threading.Tasks "); _host.ResetAsync(new InteractiveHostOptions(initializationFile: rspFile.Path, culture: CultureInfo.InvariantCulture)).Wait(); Execute(@" dynamic d = new ExpandoObject(); "); Execute(@" Process p = new Process(); "); Execute(@" Expression<Func<int>> e = () => 1; "); Execute(@" var squares = from x in new[] { 1, 2, 3 } select x * x; "); Execute(@" var sb = new StringBuilder(); "); Execute(@" var list = new List<int>(); "); Execute(@" var stream = new MemoryStream(); await Task.Delay(10); p = new Process(); Console.Write(""OK"") "); AssertEx.AssertEqualToleratingWhitespaceDifferences("", ReadErrorOutputToEnd()); AssertEx.AssertEqualToleratingWhitespaceDifferences( $@"Loading context from '{Path.GetFileName(rspFile.Path)}'. OK ", ReadOutputToEnd()); } [Fact] public void InitialScript_Error() { var initFile = Temp.CreateFile(extension: ".csx").WriteAllText("1 1"); var rspFile = Temp.CreateFile(); rspFile.WriteAllText($@" /r:System /u:System.Diagnostics {initFile.Path} "); _host.ResetAsync(new InteractiveHostOptions(initializationFile: rspFile.Path, culture: CultureInfo.InvariantCulture)).Wait(); Execute("new Process()"); AssertEx.AssertEqualToleratingWhitespaceDifferences($@" {initFile.Path}(1,3): error CS1002: ; expected ", ReadErrorOutputToEnd()); AssertEx.AssertEqualToleratingWhitespaceDifferences($@" Loading context from '{Path.GetFileName(rspFile.Path)}'. [System.Diagnostics.Process] ", ReadOutputToEnd()); } [Fact] public void ScriptAndArguments() { var scriptFile = Temp.CreateFile(extension: ".csx").WriteAllText("foreach (var arg in Args) Print(arg);"); var rspFile = Temp.CreateFile(); rspFile.WriteAllText($@" {scriptFile} a b c "); _host.ResetAsync(new InteractiveHostOptions(initializationFile: rspFile.Path, culture: CultureInfo.InvariantCulture)).Wait(); Assert.Equal("", ReadErrorOutputToEnd()); AssertEx.AssertEqualToleratingWhitespaceDifferences( $@"Loading context from '{Path.GetFileName(rspFile.Path)}'. 
""a"" ""b"" ""c"" ", ReadOutputToEnd()); } [Fact] public void ReferenceDirectives() { Execute(@" #r ""System.Numerics"" #r """ + typeof(System.Linq.Expressions.Expression).Assembly.Location + @""" using static System.Console; using System.Linq.Expressions; using System.Numerics; WriteLine(Expression.Constant(1)); WriteLine(new Complex(2, 6).Real); "); var output = ReadOutputToEnd(); Assert.Equal("1\r\n2\r\n", output); } [Fact] public void Script_NoHostNamespaces() { Execute("nameof(Microsoft.CodeAnalysis)"); AssertEx.AssertEqualToleratingWhitespaceDifferences(@" (1,8): error CS0234: The type or namespace name 'CodeAnalysis' does not exist in the namespace 'Microsoft' (are you missing an assembly reference?)", ReadErrorOutputToEnd()); Assert.Equal("", ReadOutputToEnd()); } [Fact] public void ExecutesOnStaThread() { Execute(@" #r ""System"" #r ""System.Xaml"" #r ""WindowsBase"" #r ""PresentationCore"" #r ""PresentationFramework"" new System.Windows.Window(); System.Console.WriteLine(""OK""); "); var error = ReadErrorOutputToEnd(); Assert.Equal("", error); var output = ReadOutputToEnd(); Assert.Equal("OK\r\n", output); } /// <summary> /// Execution of expressions should be /// sequential, even await expressions. /// </summary> [Fact] public void ExecuteSequentially() { Execute(@"using System; using System.Threading.Tasks;"); Execute(@"await Task.Delay(1000).ContinueWith(t => 1)"); Execute(@"await Task.Delay(500).ContinueWith(t => 2)"); Execute(@"3"); var output = ReadOutputToEnd(); Assert.Equal("1\r\n2\r\n3\r\n", output); } [Fact] public void MultiModuleAssembly() { var dir = Temp.CreateDirectory(); var dll = dir.CreateFile("MultiModule.dll").WriteAllBytes(TestResources.SymbolsTests.MultiModule.MultiModuleDll); dir.CreateFile("mod2.netmodule").WriteAllBytes(TestResources.SymbolsTests.MultiModule.mod2); dir.CreateFile("mod3.netmodule").WriteAllBytes(TestResources.SymbolsTests.MultiModule.mod3); Execute(@" #r """ + dll.Path + @""" new object[] { new Class1(), new Class2(), new Class3() } "); var error = ReadErrorOutputToEnd(); Assert.Equal("", error); var output = ReadOutputToEnd(); Assert.Equal("object[3] { Class1 { }, Class2 { }, Class3 { } }\r\n", output); } [Fact] public void SearchPaths1() { var dll = Temp.CreateFile(extension: ".dll").WriteAllBytes(TestResources.MetadataTests.InterfaceAndClass.CSInterfaces01); var srcDir = Temp.CreateDirectory(); var dllDir = Path.GetDirectoryName(dll.Path); srcDir.CreateFile("foo.csx").WriteAllText("ReferencePaths.Add(@\"" + dllDir + "\");"); Func<string, string> normalizeSeparatorsAndFrameworkFolders = (s) => s.Replace("\\", "\\\\").Replace("Framework64", "Framework"); // print default: _host.ExecuteAsync(@"ReferencePaths").Wait(); var output = ReadOutputToEnd(); Assert.Equal("SearchPaths { \"" + normalizeSeparatorsAndFrameworkFolders(string.Join("\", \"", new[] { s_fxDir })) + "\" }\r\n", output); _host.ExecuteAsync(@"SourcePaths").Wait(); output = ReadOutputToEnd(); Assert.Equal("SearchPaths { \"" + normalizeSeparatorsAndFrameworkFolders(string.Join("\", \"", new[] { s_homeDir })) + "\" }\r\n", output); // add and test if added: _host.ExecuteAsync("SourcePaths.Add(@\"" + srcDir + "\");").Wait(); _host.ExecuteAsync(@"SourcePaths").Wait(); output = ReadOutputToEnd(); Assert.Equal("SearchPaths { \"" + normalizeSeparatorsAndFrameworkFolders(string.Join("\", \"", new[] { s_homeDir, srcDir.Path })) + "\" }\r\n", output); // execute file (uses modified search paths), the file adds a reference path _host.ExecuteFileAsync("foo.csx").Wait(); 
_host.ExecuteAsync(@"ReferencePaths").Wait(); output = ReadOutputToEnd(); Assert.Equal("SearchPaths { \"" + normalizeSeparatorsAndFrameworkFolders(string.Join("\", \"", new[] { s_fxDir, dllDir })) + "\" }\r\n", output); _host.AddReferenceAsync(Path.GetFileName(dll.Path)).Wait(); _host.ExecuteAsync(@"typeof(Metadata.ICSProp)").Wait(); var error = ReadErrorOutputToEnd(); Assert.Equal("", error); output = ReadOutputToEnd(); Assert.Equal("[Metadata.ICSProp]\r\n", output); } [Fact, WorkItem(6457, "https://github.com/dotnet/roslyn/issues/6457")] public void MissingReferencesReuse() { var source = @" public class C { public System.Diagnostics.Process P; } "; var lib = CSharpCompilation.Create( "Lib", new[] { SyntaxFactory.ParseSyntaxTree(source) }, new[] { TestReferences.NetFx.v4_0_30319.mscorlib, TestReferences.NetFx.v4_0_30319.System }, new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary)); var libFile = Temp.CreateFile("lib").WriteAllBytes(lib.EmitToArray()); Execute($@"#r ""{libFile.Path}"""); Execute("C c;"); Execute("c = new C()"); var error = ReadErrorOutputToEnd(); Assert.Equal("", error); var output = ReadOutputToEnd(); AssertEx.AssertEqualToleratingWhitespaceDifferences("C { P=null }", output); } [Fact, WorkItem(7280, "https://github.com/dotnet/roslyn/issues/7280")] public void AsyncContinueOnDifferentThread() { Execute(@" using System; using System.Threading; using System.Threading.Tasks; Console.Write(Task.Run(() => { Thread.CurrentThread.Join(100); return 42; }).ContinueWith(t => t.Result).Result)"); var output = ReadOutputToEnd(); var error = ReadErrorOutputToEnd(); Assert.Equal("42", output); Assert.Empty(error); } [Fact] public void Exception() { Execute(@"throw new System.Exception();"); var output = ReadOutputToEnd(); var error = ReadErrorOutputToEnd(); Assert.Equal("", output); Assert.DoesNotContain("Unexpected", error, StringComparison.OrdinalIgnoreCase); Assert.True(error.StartsWith(new Exception().Message)); } #region Submission result printing - null/void/value. [Fact] public void SubmissionResult_PrintingNull() { Execute(@" string s; s "); var output = ReadOutputToEnd(); Assert.Equal("null\r\n", output); } [Fact] public void SubmissionResult_PrintingVoid() { Execute(@"System.Console.WriteLine(2)"); var output = ReadOutputToEnd(); Assert.Equal("2\r\n", output); Execute(@" void foo() { } foo() "); output = ReadOutputToEnd(); Assert.Equal("", output); } // TODO (https://github.com/dotnet/roslyn/issues/7976): delete this [WorkItem(7976, "https://github.com/dotnet/roslyn/issues/7976")] [Fact] public void Workaround7976() { Thread.Sleep(TimeSpan.FromSeconds(10)); } #endregion private static ImmutableArray<string> SplitLines(string text) { return ImmutableArray.Create(text.Split(new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries)); } } }
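// ---------------------------------------------------------------------------------
// Standalone sketch (not the Roslyn SynchronizedStringWriter API) of the marker-based
// synchronization idea used by ReadOutputToEnd above: write a unique mark into the
// redirected stream, then poll the captured text until everything up to that mark has
// been flushed by the remote process.
// ---------------------------------------------------------------------------------
using System;
using System.Text;
using System.Threading;

static class MarkedOutputReader
{
    // Returns the text that precedes 'mark', waiting until the producer has written it.
    public static string ReadUpTo(StringBuilder captured, object gate, string mark)
    {
        while (true)
        {
            lock (gate)
            {
                string text = captured.ToString();
                int i = text.IndexOf(mark, StringComparison.Ordinal);
                if (i >= 0)
                {
                    return text.Substring(0, i);
                }
            }
            Thread.Sleep(10); // same polling interval the test helper uses
        }
    }
}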
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: booking/groups/reservation_invoices.proto #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace HOLMS.Types.Booking.Groups { /// <summary>Holder for reflection information generated from booking/groups/reservation_invoices.proto</summary> public static partial class ReservationInvoicesReflection { #region Descriptor /// <summary>File descriptor for booking/groups/reservation_invoices.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static ReservationInvoicesReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "Cilib29raW5nL2dyb3Vwcy9yZXNlcnZhdGlvbl9pbnZvaWNlcy5wcm90bxIa", "aG9sbXMudHlwZXMuYm9va2luZy5ncm91cHMaLmJvb2tpbmcvaW5kaWNhdG9y", "cy9yZXNlcnZhdGlvbl9pbmRpY2F0b3IucHJvdG8aH3ByaW1pdGl2ZS9tb25l", "dGFyeV9hbW91bnQucHJvdG8aFHByaW1pdGl2ZS91dWlkLnByb3RvIrwDChlS", "ZXNlcnZhdGlvbkludm9pY2VNYXBwaW5nEi8KCmludm9pY2VfaWQYASABKAsy", "Gy5ob2xtcy50eXBlcy5wcmltaXRpdmUuVXVpZBJMCg5yZXNlcnZhdGlvbl9p", "ZBgCIAEoCzI0LmhvbG1zLnR5cGVzLmJvb2tpbmcuaW5kaWNhdG9ycy5SZXNl", "cnZhdGlvbkluZGljYXRvchI1CgZhbW91bnQYAyABKAsyJS5ob2xtcy50eXBl", "cy5wcmltaXRpdmUuTW9uZXRhcnlBbW91bnQSNgoHcGF5bWVudBgEIAEoCzIl", "LmhvbG1zLnR5cGVzLnByaW1pdGl2ZS5Nb25ldGFyeUFtb3VudBI0CgV0YXhl", "cxgFIAEoCzIlLmhvbG1zLnR5cGVzLnByaW1pdGl2ZS5Nb25ldGFyeUFtb3Vu", "dBIzCgRmZWVzGAYgASgLMiUuaG9sbXMudHlwZXMucHJpbWl0aXZlLk1vbmV0", "YXJ5QW1vdW50EjYKB2NoYXJnZXMYByABKAsyJS5ob2xtcy50eXBlcy5wcmlt", "aXRpdmUuTW9uZXRhcnlBbW91bnQSDgoGdm9pZGVkGAggASgIQi1aDmJvb2tp", "bmcvZ3JvdXBzqgIaSE9MTVMuVHlwZXMuQm9va2luZy5Hcm91cHNiBnByb3Rv", "Mw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::HOLMS.Types.Booking.Indicators.ReservationIndicatorReflection.Descriptor, global::HOLMS.Types.Primitive.MonetaryAmountReflection.Descriptor, global::HOLMS.Types.Primitive.UuidReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.Booking.Groups.ReservationInvoiceMapping), global::HOLMS.Types.Booking.Groups.ReservationInvoiceMapping.Parser, new[]{ "InvoiceId", "ReservationId", "Amount", "Payment", "Taxes", "Fees", "Charges", "Voided" }, null, null, null) })); } #endregion } #region Messages public sealed partial class ReservationInvoiceMapping : pb::IMessage<ReservationInvoiceMapping> { private static readonly pb::MessageParser<ReservationInvoiceMapping> _parser = new pb::MessageParser<ReservationInvoiceMapping>(() => new ReservationInvoiceMapping()); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<ReservationInvoiceMapping> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::HOLMS.Types.Booking.Groups.ReservationInvoicesReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public ReservationInvoiceMapping() { OnConstruction(); } partial void OnConstruction(); 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public ReservationInvoiceMapping(ReservationInvoiceMapping other) : this() { InvoiceId = other.invoiceId_ != null ? other.InvoiceId.Clone() : null; ReservationId = other.reservationId_ != null ? other.ReservationId.Clone() : null; Amount = other.amount_ != null ? other.Amount.Clone() : null; Payment = other.payment_ != null ? other.Payment.Clone() : null; Taxes = other.taxes_ != null ? other.Taxes.Clone() : null; Fees = other.fees_ != null ? other.Fees.Clone() : null; Charges = other.charges_ != null ? other.Charges.Clone() : null; voided_ = other.voided_; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public ReservationInvoiceMapping Clone() { return new ReservationInvoiceMapping(this); } /// <summary>Field number for the "invoice_id" field.</summary> public const int InvoiceIdFieldNumber = 1; private global::HOLMS.Types.Primitive.Uuid invoiceId_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Primitive.Uuid InvoiceId { get { return invoiceId_; } set { invoiceId_ = value; } } /// <summary>Field number for the "reservation_id" field.</summary> public const int ReservationIdFieldNumber = 2; private global::HOLMS.Types.Booking.Indicators.ReservationIndicator reservationId_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Booking.Indicators.ReservationIndicator ReservationId { get { return reservationId_; } set { reservationId_ = value; } } /// <summary>Field number for the "amount" field.</summary> public const int AmountFieldNumber = 3; private global::HOLMS.Types.Primitive.MonetaryAmount amount_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Primitive.MonetaryAmount Amount { get { return amount_; } set { amount_ = value; } } /// <summary>Field number for the "payment" field.</summary> public const int PaymentFieldNumber = 4; private global::HOLMS.Types.Primitive.MonetaryAmount payment_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Primitive.MonetaryAmount Payment { get { return payment_; } set { payment_ = value; } } /// <summary>Field number for the "taxes" field.</summary> public const int TaxesFieldNumber = 5; private global::HOLMS.Types.Primitive.MonetaryAmount taxes_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Primitive.MonetaryAmount Taxes { get { return taxes_; } set { taxes_ = value; } } /// <summary>Field number for the "fees" field.</summary> public const int FeesFieldNumber = 6; private global::HOLMS.Types.Primitive.MonetaryAmount fees_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Primitive.MonetaryAmount Fees { get { return fees_; } set { fees_ = value; } } /// <summary>Field number for the "charges" field.</summary> public const int ChargesFieldNumber = 7; private global::HOLMS.Types.Primitive.MonetaryAmount charges_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::HOLMS.Types.Primitive.MonetaryAmount Charges { get { return charges_; } set { charges_ = value; } } /// <summary>Field number for the "voided" field.</summary> public const int VoidedFieldNumber = 8; private bool voided_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Voided { get { return voided_; } set { voided_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as 
ReservationInvoiceMapping); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(ReservationInvoiceMapping other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (!object.Equals(InvoiceId, other.InvoiceId)) return false; if (!object.Equals(ReservationId, other.ReservationId)) return false; if (!object.Equals(Amount, other.Amount)) return false; if (!object.Equals(Payment, other.Payment)) return false; if (!object.Equals(Taxes, other.Taxes)) return false; if (!object.Equals(Fees, other.Fees)) return false; if (!object.Equals(Charges, other.Charges)) return false; if (Voided != other.Voided) return false; return true; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (invoiceId_ != null) hash ^= InvoiceId.GetHashCode(); if (reservationId_ != null) hash ^= ReservationId.GetHashCode(); if (amount_ != null) hash ^= Amount.GetHashCode(); if (payment_ != null) hash ^= Payment.GetHashCode(); if (taxes_ != null) hash ^= Taxes.GetHashCode(); if (fees_ != null) hash ^= Fees.GetHashCode(); if (charges_ != null) hash ^= Charges.GetHashCode(); if (Voided != false) hash ^= Voided.GetHashCode(); return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (invoiceId_ != null) { output.WriteRawTag(10); output.WriteMessage(InvoiceId); } if (reservationId_ != null) { output.WriteRawTag(18); output.WriteMessage(ReservationId); } if (amount_ != null) { output.WriteRawTag(26); output.WriteMessage(Amount); } if (payment_ != null) { output.WriteRawTag(34); output.WriteMessage(Payment); } if (taxes_ != null) { output.WriteRawTag(42); output.WriteMessage(Taxes); } if (fees_ != null) { output.WriteRawTag(50); output.WriteMessage(Fees); } if (charges_ != null) { output.WriteRawTag(58); output.WriteMessage(Charges); } if (Voided != false) { output.WriteRawTag(64); output.WriteBool(Voided); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (invoiceId_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(InvoiceId); } if (reservationId_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(ReservationId); } if (amount_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(Amount); } if (payment_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(Payment); } if (taxes_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(Taxes); } if (fees_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(Fees); } if (charges_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(Charges); } if (Voided != false) { size += 1 + 1; } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(ReservationInvoiceMapping other) { if (other == null) { return; } if (other.invoiceId_ != null) { if (invoiceId_ == null) { invoiceId_ = new global::HOLMS.Types.Primitive.Uuid(); } InvoiceId.MergeFrom(other.InvoiceId); } if (other.reservationId_ != null) { if (reservationId_ == null) { reservationId_ = new global::HOLMS.Types.Booking.Indicators.ReservationIndicator(); } ReservationId.MergeFrom(other.ReservationId); } if (other.amount_ != null) { if (amount_ == null) { amount_ = new 
global::HOLMS.Types.Primitive.MonetaryAmount(); } Amount.MergeFrom(other.Amount); } if (other.payment_ != null) { if (payment_ == null) { payment_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } Payment.MergeFrom(other.Payment); } if (other.taxes_ != null) { if (taxes_ == null) { taxes_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } Taxes.MergeFrom(other.Taxes); } if (other.fees_ != null) { if (fees_ == null) { fees_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } Fees.MergeFrom(other.Fees); } if (other.charges_ != null) { if (charges_ == null) { charges_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } Charges.MergeFrom(other.Charges); } if (other.Voided != false) { Voided = other.Voided; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: input.SkipLastField(); break; case 10: { if (invoiceId_ == null) { invoiceId_ = new global::HOLMS.Types.Primitive.Uuid(); } input.ReadMessage(invoiceId_); break; } case 18: { if (reservationId_ == null) { reservationId_ = new global::HOLMS.Types.Booking.Indicators.ReservationIndicator(); } input.ReadMessage(reservationId_); break; } case 26: { if (amount_ == null) { amount_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } input.ReadMessage(amount_); break; } case 34: { if (payment_ == null) { payment_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } input.ReadMessage(payment_); break; } case 42: { if (taxes_ == null) { taxes_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } input.ReadMessage(taxes_); break; } case 50: { if (fees_ == null) { fees_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } input.ReadMessage(fees_); break; } case 58: { if (charges_ == null) { charges_ = new global::HOLMS.Types.Primitive.MonetaryAmount(); } input.ReadMessage(charges_); break; } case 64: { Voided = input.ReadBool(); break; } } } } } #endregion } #endregion Designer generated code
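// A minimal round-trip sketch for the generated ReservationInvoiceMapping message above, using only
// the members shown (CalculateSize, WriteTo, MergeFrom). It assumes the Google.Protobuf runtime is
// referenced and that the namespace containing the generated type is imported; it is an illustration,
// not part of the generated file.
using Google.Protobuf;

public static class ReservationInvoiceMappingExample
{
    public static ReservationInvoiceMapping RoundTrip(ReservationInvoiceMapping mapping)
    {
        // Serialize into a flat buffer sized exactly by CalculateSize().
        var buffer = new byte[mapping.CalculateSize()];
        var output = new CodedOutputStream(buffer);
        mapping.WriteTo(output);              // writes each non-null field with its wire tag (10..64)
        output.Flush();

        // Parse back by merging into a fresh instance, mirroring MergeFrom(CodedInputStream) above.
        var parsed = new ReservationInvoiceMapping();
        parsed.MergeFrom(new CodedInputStream(buffer));
        return parsed;                        // parsed.Equals(mapping) should hold after the round trip
    }
}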
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Composition; using System.Diagnostics; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.CodeActions; using Microsoft.CodeAnalysis.CodeRefactorings; using Microsoft.CodeAnalysis.CSharp.Extensions; using Microsoft.CodeAnalysis.CSharp.Symbols; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.CSharp.Utilities; using Microsoft.CodeAnalysis.FindSymbols; using Microsoft.CodeAnalysis.Formatting; using Microsoft.CodeAnalysis.Shared.Extensions; using Microsoft.CodeAnalysis.Simplification; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.CSharp.CodeRefactorings.InlineTemporary { [ExportCodeRefactoringProvider(LanguageNames.CSharp, Name = PredefinedCodeRefactoringProviderNames.InlineTemporary), Shared] internal partial class InlineTemporaryCodeRefactoringProvider : CodeRefactoringProvider { internal static readonly SyntaxAnnotation DefinitionAnnotation = new SyntaxAnnotation(); internal static readonly SyntaxAnnotation ReferenceAnnotation = new SyntaxAnnotation(); internal static readonly SyntaxAnnotation InitializerAnnotation = new SyntaxAnnotation(); internal static readonly SyntaxAnnotation ExpressionToInlineAnnotation = new SyntaxAnnotation(); public override async Task ComputeRefactoringsAsync(CodeRefactoringContext context) { var document = context.Document; var textSpan = context.Span; var cancellationToken = context.CancellationToken; if (document.Project.Solution.Workspace.Kind == WorkspaceKind.MiscellaneousFiles) { return; } var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false); var token = root.FindToken(textSpan.Start); if (!token.Span.Contains(textSpan)) { return; } var node = token.Parent; if (!node.IsKind(SyntaxKind.VariableDeclarator) || !node.IsParentKind(SyntaxKind.VariableDeclaration) || !node.Parent.IsParentKind(SyntaxKind.LocalDeclarationStatement)) { return; } var variableDeclarator = (VariableDeclaratorSyntax)node; var variableDeclaration = (VariableDeclarationSyntax)variableDeclarator.Parent; var localDeclarationStatement = (LocalDeclarationStatementSyntax)variableDeclaration.Parent; if (variableDeclarator.Identifier != token || variableDeclarator.Initializer == null || variableDeclarator.Initializer.Value.IsMissing || variableDeclarator.Initializer.Value.IsKind(SyntaxKind.StackAllocArrayCreationExpression)) { return; } if (variableDeclarator.Initializer.Value.Kind() == SyntaxKind.RefExpression) { // TODO: inlining byref temps is NYI return; } if (localDeclarationStatement.ContainsDiagnostics) { return; } var references = await GetReferencesAsync(document, variableDeclarator, cancellationToken).ConfigureAwait(false); if (!references.Any()) { return; } context.RegisterRefactoring( new MyCodeAction( CSharpFeaturesResources.Inline_temporary_variable, (c) => this.InlineTemporaryAsync(document, variableDeclarator, c))); } private async Task<IEnumerable<ReferenceLocation>> GetReferencesAsync( Document document, VariableDeclaratorSyntax variableDeclarator, CancellationToken cancellationToken) { var semanticModel = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); var local = semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken); if (local != null) { var findReferencesResult = await
SymbolFinder.FindReferencesAsync(local, document.Project.Solution, cancellationToken).ConfigureAwait(false); var locations = findReferencesResult.Single(r => r.Definition == local).Locations; if (!locations.Any(loc => semanticModel.SyntaxTree.OverlapsHiddenPosition(loc.Location.SourceSpan, cancellationToken))) { return locations; } } return SpecializedCollections.EmptyEnumerable<ReferenceLocation>(); } private static bool HasConflict(IdentifierNameSyntax identifier, VariableDeclaratorSyntax variableDeclarator) { // TODO: Check for more conflict types. if (identifier.SpanStart < variableDeclarator.SpanStart) { return true; } var identifierNode = identifier .Ancestors() .TakeWhile(n => n.Kind() == SyntaxKind.ParenthesizedExpression || n.Kind() == SyntaxKind.CastExpression) .LastOrDefault(); if (identifierNode == null) { identifierNode = identifier; } if (identifierNode.IsParentKind(SyntaxKind.Argument)) { var argument = (ArgumentSyntax)identifierNode.Parent; if (argument.RefOrOutKeyword.Kind() != SyntaxKind.None) { return true; } } else if (identifierNode.Parent.IsKind( SyntaxKind.PreDecrementExpression, SyntaxKind.PreIncrementExpression, SyntaxKind.PostDecrementExpression, SyntaxKind.PostIncrementExpression, SyntaxKind.AddressOfExpression)) { return true; } else if (identifierNode.Parent is AssignmentExpressionSyntax) { var binaryExpression = (AssignmentExpressionSyntax)identifierNode.Parent; if (binaryExpression.Left == identifierNode) { return true; } } return false; } private static SyntaxAnnotation CreateConflictAnnotation() { return ConflictAnnotation.Create(CSharpFeaturesResources.Conflict_s_detected); } private async Task<Document> InlineTemporaryAsync(Document document, VariableDeclaratorSyntax declarator, CancellationToken cancellationToken) { var workspace = document.Project.Solution.Workspace; // Annotate the variable declarator so that we can get back to it later. var updatedDocument = await document.ReplaceNodeAsync(declarator, declarator.WithAdditionalAnnotations(DefinitionAnnotation), cancellationToken).ConfigureAwait(false); var semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); var variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false); // Create the expression that we're actually going to inline. var expressionToInline = await CreateExpressionToInlineAsync(variableDeclarator, updatedDocument, cancellationToken).ConfigureAwait(false); // Collect the identifier names for each reference. var local = (ILocalSymbol)semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken); var symbolRefs = await SymbolFinder.FindReferencesAsync(local, updatedDocument.Project.Solution, cancellationToken).ConfigureAwait(false); var references = symbolRefs.Single(r => r.Definition == local).Locations; var syntaxRoot = await updatedDocument.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false); // Collect the topmost parenting expression for each reference. var nonConflictingIdentifierNodes = references .Select(loc => (IdentifierNameSyntax)syntaxRoot.FindToken(loc.Location.SourceSpan.Start).Parent) .Where(ident => !HasConflict(ident, variableDeclarator)); // Add referenceAnnotations to identifier nodes being replaced. 
updatedDocument = await updatedDocument.ReplaceNodesAsync( nonConflictingIdentifierNodes, (o, n) => n.WithAdditionalAnnotations(ReferenceAnnotation), cancellationToken).ConfigureAwait(false); semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false); // Get the annotated reference nodes. nonConflictingIdentifierNodes = await FindReferenceAnnotatedNodesAsync(updatedDocument, cancellationToken).ConfigureAwait(false); var topmostParentingExpressions = nonConflictingIdentifierNodes .Select(ident => GetTopMostParentingExpression(ident)) .Distinct(); var originalInitializerSymbolInfo = semanticModel.GetSymbolInfo(variableDeclarator.Initializer.Value, cancellationToken); // Make each topmost parenting statement or Equals Clause Expressions semantically explicit. updatedDocument = await updatedDocument.ReplaceNodesAsync(topmostParentingExpressions, (o, n) => Simplifier.Expand(n, semanticModel, workspace, cancellationToken: cancellationToken), cancellationToken).ConfigureAwait(false); semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); var semanticModelBeforeInline = semanticModel; variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false); var scope = GetScope(variableDeclarator); var newScope = ReferenceRewriter.Visit(semanticModel, scope, variableDeclarator, expressionToInline, cancellationToken); updatedDocument = await updatedDocument.ReplaceNodeAsync(scope, newScope, cancellationToken).ConfigureAwait(false); semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false); newScope = GetScope(variableDeclarator); var conflicts = newScope.GetAnnotatedNodesAndTokens(ConflictAnnotation.Kind); var declaratorConflicts = variableDeclarator.GetAnnotatedNodesAndTokens(ConflictAnnotation.Kind); // Note that we only remove the local declaration if there weren't any conflicts, // unless those conflicts are inside the local declaration. if (conflicts.Count() == declaratorConflicts.Count()) { // Certain semantic conflicts can be detected only after the reference rewriter has inlined the expression var newDocument = await DetectSemanticConflicts(updatedDocument, semanticModel, semanticModelBeforeInline, originalInitializerSymbolInfo, cancellationToken).ConfigureAwait(false); if (updatedDocument == newDocument) { // No semantic conflicts, we can remove the definition. updatedDocument = await updatedDocument.ReplaceNodeAsync(newScope, RemoveDeclaratorFromScope(variableDeclarator, newScope), cancellationToken).ConfigureAwait(false); } else { // There were some semantic conflicts, don't remove the definition. 
updatedDocument = newDocument; } } return updatedDocument; } private static async Task<VariableDeclaratorSyntax> FindDeclaratorAsync(Document document, CancellationToken cancellationToken) { return await FindNodeWithAnnotationAsync<VariableDeclaratorSyntax>(document, DefinitionAnnotation, cancellationToken).ConfigureAwait(false); } private static async Task<ExpressionSyntax> FindInitializerAsync(Document document, CancellationToken cancellationToken) { return await FindNodeWithAnnotationAsync<ExpressionSyntax>(document, InitializerAnnotation, cancellationToken).ConfigureAwait(false); } private static async Task<T> FindNodeWithAnnotationAsync<T>(Document document, SyntaxAnnotation annotation, CancellationToken cancellationToken) where T : SyntaxNode { var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false); return root .GetAnnotatedNodesAndTokens(annotation) .Single() .AsNode() as T; } private static async Task<IEnumerable<IdentifierNameSyntax>> FindReferenceAnnotatedNodesAsync(Document document, CancellationToken cancellationToken) { var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false); return FindReferenceAnnotatedNodes(root); } private static IEnumerable<IdentifierNameSyntax> FindReferenceAnnotatedNodes(SyntaxNode root) { var annotatedNodesAndTokens = root.GetAnnotatedNodesAndTokens(ReferenceAnnotation); foreach (var nodeOrToken in annotatedNodesAndTokens) { if (nodeOrToken.IsNode && nodeOrToken.AsNode().IsKind(SyntaxKind.IdentifierName)) { yield return (IdentifierNameSyntax)nodeOrToken.AsNode(); } } } private SyntaxNode GetScope(VariableDeclaratorSyntax variableDeclarator) { var variableDeclaration = (VariableDeclarationSyntax)variableDeclarator.Parent; var localDeclaration = (LocalDeclarationStatementSyntax)variableDeclaration.Parent; var scope = localDeclaration.Parent; while (scope.IsKind(SyntaxKind.LabeledStatement)) { scope = scope.Parent; } var parentExpressions = scope.AncestorsAndSelf().OfType<ExpressionSyntax>(); if (parentExpressions.Any()) { scope = parentExpressions.LastOrDefault().Parent; } return scope; } private VariableDeclaratorSyntax FindDeclarator(SyntaxNode node) { var annotatedNodesOrTokens = node.GetAnnotatedNodesAndTokens(DefinitionAnnotation).ToList(); Contract.Requires(annotatedNodesOrTokens.Count == 1, "Only a single variable declarator should have been annotated."); return (VariableDeclaratorSyntax)annotatedNodesOrTokens.First().AsNode(); } private SyntaxTriviaList GetTriviaToPreserve(SyntaxTriviaList syntaxTriviaList) { return ShouldPreserve(syntaxTriviaList) ? 
syntaxTriviaList : default(SyntaxTriviaList); } private static bool ShouldPreserve(SyntaxTriviaList trivia) { return trivia.Any(t => t.IsRegularComment() || t.IsDirective); } private SyntaxNode RemoveDeclaratorFromVariableList(VariableDeclaratorSyntax variableDeclarator, VariableDeclarationSyntax variableDeclaration) { Debug.Assert(variableDeclaration.Variables.Count > 1); Debug.Assert(variableDeclaration.Variables.Contains(variableDeclarator)); var localDeclaration = (LocalDeclarationStatementSyntax)variableDeclaration.Parent; var scope = GetScope(variableDeclarator); var newLocalDeclaration = localDeclaration.RemoveNode(variableDeclarator, SyntaxRemoveOptions.KeepNoTrivia) .WithAdditionalAnnotations(Formatter.Annotation); return scope.ReplaceNode(localDeclaration, newLocalDeclaration); } private SyntaxNode RemoveDeclaratorFromScope(VariableDeclaratorSyntax variableDeclarator, SyntaxNode scope) { var variableDeclaration = (VariableDeclarationSyntax)variableDeclarator.Parent; // If there is more than one variable declarator, remove this one from the variable declaration. if (variableDeclaration.Variables.Count > 1) { return RemoveDeclaratorFromVariableList(variableDeclarator, variableDeclaration); } var localDeclaration = (LocalDeclarationStatementSyntax)variableDeclaration.Parent; // There's only one variable declarator, so we'll remove the local declaration // statement entirely. This means that we'll concatenate the leading and trailing // trivia of this declaration and move it to the next statement. var leadingTrivia = localDeclaration .GetLeadingTrivia() .Reverse() .SkipWhile(t => t.MatchesKind(SyntaxKind.WhitespaceTrivia)) .Reverse() .ToSyntaxTriviaList(); var trailingTrivia = localDeclaration .GetTrailingTrivia() .SkipWhile(t => t.MatchesKind(SyntaxKind.WhitespaceTrivia, SyntaxKind.EndOfLineTrivia)) .ToSyntaxTriviaList(); var newLeadingTrivia = leadingTrivia.Concat(trailingTrivia); var nextToken = localDeclaration.GetLastToken().GetNextTokenOrEndOfFile(); var newNextToken = nextToken.WithPrependedLeadingTrivia(newLeadingTrivia) .WithAdditionalAnnotations(Formatter.Annotation); var newScope = scope.ReplaceToken(nextToken, newNextToken); var newLocalDeclaration = (LocalDeclarationStatementSyntax)FindDeclarator(newScope).Parent.Parent; // If the local is parented by a label statement, we can't remove this statement. Instead, // we'll replace the local declaration with an empty expression statement. 
if (newLocalDeclaration.IsParentKind(SyntaxKind.LabeledStatement)) { var labeledStatement = (LabeledStatementSyntax)newLocalDeclaration.Parent; var newLabeledStatement = labeledStatement.ReplaceNode(newLocalDeclaration, SyntaxFactory.ParseStatement("")); return newScope.ReplaceNode(labeledStatement, newLabeledStatement); } return newScope.RemoveNode(newLocalDeclaration, SyntaxRemoveOptions.KeepNoTrivia); } private ExpressionSyntax SkipRedundantExteriorParentheses(ExpressionSyntax expression) { while (expression.IsKind(SyntaxKind.ParenthesizedExpression)) { var parenthesized = (ParenthesizedExpressionSyntax)expression; if (parenthesized.Expression == null || parenthesized.Expression.IsMissing) { break; } if (parenthesized.Expression.IsKind(SyntaxKind.ParenthesizedExpression) || parenthesized.Expression.IsKind(SyntaxKind.IdentifierName)) { expression = parenthesized.Expression; } else { break; } } return expression; } private async Task<ExpressionSyntax> CreateExpressionToInlineAsync( VariableDeclaratorSyntax variableDeclarator, Document document, CancellationToken cancellationToken) { var updatedDocument = document; var expression = SkipRedundantExteriorParentheses(variableDeclarator.Initializer.Value); var semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); var localSymbol = (ILocalSymbol)semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken); var newExpression = InitializerRewriter.Visit(expression, localSymbol, semanticModel); // If this is an array initializer, we need to transform it into an array creation // expression for inlining. if (newExpression.Kind() == SyntaxKind.ArrayInitializerExpression) { var arrayType = (ArrayTypeSyntax)localSymbol.Type.GenerateTypeSyntax(); var arrayInitializer = (InitializerExpressionSyntax)newExpression; // Add any non-whitespace trailing trivia from the equals clause to the type. var equalsToken = variableDeclarator.Initializer.EqualsToken; if (equalsToken.HasTrailingTrivia) { var trailingTrivia = equalsToken.TrailingTrivia.SkipInitialWhitespace(); if (trailingTrivia.Any()) { arrayType = arrayType.WithTrailingTrivia(trailingTrivia); } } newExpression = SyntaxFactory.ArrayCreationExpression(arrayType, arrayInitializer); } newExpression = newExpression.WithAdditionalAnnotations(InitializerAnnotation); updatedDocument = await updatedDocument.ReplaceNodeAsync(variableDeclarator.Initializer.Value, newExpression, cancellationToken).ConfigureAwait(false); semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); newExpression = await FindInitializerAsync(updatedDocument, cancellationToken).ConfigureAwait(false); var newVariableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false); localSymbol = (ILocalSymbol)semanticModel.GetDeclaredSymbol(newVariableDeclarator, cancellationToken); var explicitCastExpression = newExpression.CastIfPossible(localSymbol.Type, newVariableDeclarator.SpanStart, semanticModel); if (explicitCastExpression != newExpression) { updatedDocument = await updatedDocument.ReplaceNodeAsync(newExpression, explicitCastExpression, cancellationToken).ConfigureAwait(false); semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); newVariableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false); } // Now that the variable declarator is normalized, make its initializer // value semantically explicit. 
newExpression = await Simplifier.ExpandAsync(newVariableDeclarator.Initializer.Value, updatedDocument, cancellationToken: cancellationToken).ConfigureAwait(false); return newExpression.WithAdditionalAnnotations(ExpressionToInlineAnnotation); } private static SyntaxNode GetTopMostParentingExpression(ExpressionSyntax expression) { return expression.AncestorsAndSelf().OfType<ExpressionSyntax>().Last(); } private static async Task<Document> DetectSemanticConflicts( Document inlinedDocument, SemanticModel newSemanticModelForInlinedDocument, SemanticModel semanticModelBeforeInline, SymbolInfo originalInitializerSymbolInfo, CancellationToken cancellationToken) { // In this method we detect if inlining the expression introduced the following semantic change: // The symbol info associated with any of the inlined expressions does not match the symbol info for original initializer expression prior to inline. // If any semantic changes were introduced by inlining, we update the document with conflict annotations. // Otherwise we return the given inlined document without any changes. var syntaxRootBeforeInline = await semanticModelBeforeInline.SyntaxTree.GetRootAsync(cancellationToken).ConfigureAwait(false); // Get all the identifier nodes which were replaced with inlined expression. var originalIdentifierNodes = FindReferenceAnnotatedNodes(syntaxRootBeforeInline); if (originalIdentifierNodes.IsEmpty()) { // No conflicts return inlinedDocument; } // Get all the inlined expression nodes. var syntaxRootAfterInline = await inlinedDocument.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false); var inlinedExprNodes = syntaxRootAfterInline.GetAnnotatedNodesAndTokens(ExpressionToInlineAnnotation); Debug.Assert(originalIdentifierNodes.Count() == inlinedExprNodes.Count()); Dictionary<SyntaxNode, SyntaxNode> replacementNodesWithChangedSemantics = null; using (var originalNodesEnum = originalIdentifierNodes.GetEnumerator()) { using (var inlinedNodesOrTokensEnum = inlinedExprNodes.GetEnumerator()) { while (originalNodesEnum.MoveNext()) { inlinedNodesOrTokensEnum.MoveNext(); var originalNode = originalNodesEnum.Current; // expressionToInline is Parenthesized prior to replacement, so get the parenting parenthesized expression. var inlinedNode = (ExpressionSyntax)inlinedNodesOrTokensEnum.Current.Parent; Debug.Assert(inlinedNode.IsKind(SyntaxKind.ParenthesizedExpression)); // inlinedNode is the expanded form of the actual initializer expression in the original document. // We have annotated the inner initializer with a special syntax annotation "InitializerAnnotation". // Get this annotated node and compute the symbol info for this node in the inlined document. var innerInitializerInInlineNodeOrToken = inlinedNode.GetAnnotatedNodesAndTokens(InitializerAnnotation).First(); ExpressionSyntax innerInitializerInInlineNode = (ExpressionSyntax)(innerInitializerInInlineNodeOrToken.IsNode ? innerInitializerInInlineNodeOrToken.AsNode() : innerInitializerInInlineNodeOrToken.AsToken().Parent); var newInitializerSymbolInfo = newSemanticModelForInlinedDocument.GetSymbolInfo(innerInitializerInInlineNode, cancellationToken); // Verification: The symbol info associated with any of the inlined expressions does not match the symbol info for original initializer expression prior to inline. 
if (!SpeculationAnalyzer.SymbolInfosAreCompatible(originalInitializerSymbolInfo, newInitializerSymbolInfo, performEquivalenceCheck: true)) { newInitializerSymbolInfo = newSemanticModelForInlinedDocument.GetSymbolInfo(inlinedNode, cancellationToken); if (!SpeculationAnalyzer.SymbolInfosAreCompatible(originalInitializerSymbolInfo, newInitializerSymbolInfo, performEquivalenceCheck: true)) { if (replacementNodesWithChangedSemantics == null) { replacementNodesWithChangedSemantics = new Dictionary<SyntaxNode, SyntaxNode>(); } replacementNodesWithChangedSemantics.Add(inlinedNode, originalNode); } } } } } if (replacementNodesWithChangedSemantics == null) { // No conflicts. return inlinedDocument; } // Replace the conflicting inlined nodes with the original nodes annotated with conflict annotation. Func<SyntaxNode, SyntaxNode, SyntaxNode> conflictAnnotationAdder = (SyntaxNode oldNode, SyntaxNode newNode) => newNode.WithAdditionalAnnotations(ConflictAnnotation.Create(CSharpFeaturesResources.Conflict_s_detected)); return await inlinedDocument.ReplaceNodesAsync(replacementNodesWithChangedSemantics.Keys, conflictAnnotationAdder, cancellationToken).ConfigureAwait(false); } private class MyCodeAction : CodeAction.DocumentChangeAction { public MyCodeAction(string title, Func<CancellationToken, Task<Document>> createChangedDocument) : base(title, createChangedDocument) { } } } }
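// An illustrative before/after sketch of the edit the provider above produces when
// "Inline temporary variable" is invoked on 'temp'. The exact result also depends on
// Simplifier.Expand, CastIfPossible and the conflict detection shown above, so real output may
// carry additional parentheses or explicit casts.
public class InlineTemporaryExample
{
    // Before: a local with a single initializer and only read references.
    public int Before(int x)
    {
        var temp = x * 2 + 1;   // invoking the refactoring here...
        return temp + temp;
    }

    // After: every reference is replaced by the parenthesized initializer and the
    // declaration statement is removed, with its comments and directives preserved as trivia.
    public int After(int x)
    {
        return (x * 2 + 1) + (x * 2 + 1);
    }
}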
using System; using System.Collections.Generic; using System.Linq; using System.Security.Claims; using System.Threading.Tasks; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc.Rendering; using Microsoft.Extensions.Logging; using CoreCraft.Models; using CoreCraft.Models.AccountViewModels; using CoreCraft.Services; namespace CoreCraft.Controllers { [Authorize] public class AccountController : Controller { private readonly UserManager<ApplicationUser> _userManager; private readonly SignInManager<ApplicationUser> _signInManager; private readonly IEmailSender _emailSender; private readonly ISmsSender _smsSender; private readonly ILogger _logger; public AccountController( UserManager<ApplicationUser> userManager, SignInManager<ApplicationUser> signInManager, IEmailSender emailSender, ISmsSender smsSender, ILoggerFactory loggerFactory) { _userManager = userManager; _signInManager = signInManager; _emailSender = emailSender; _smsSender = smsSender; _logger = loggerFactory.CreateLogger<AccountController>(); } // // GET: /Account/Login [HttpGet] [AllowAnonymous] public IActionResult Login(string returnUrl = null) { ViewData["ReturnUrl"] = returnUrl; return View(); } // // POST: /Account/Login [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public async Task<IActionResult> Login(LoginViewModel model, string returnUrl = null) { ViewData["ReturnUrl"] = returnUrl; if (ModelState.IsValid) { // This doesn't count login failures towards account lockout // To enable password failures to trigger account lockout, set lockoutOnFailure: true var result = await _signInManager.PasswordSignInAsync(model.Email, model.Password, model.RememberMe, lockoutOnFailure: false); if (result.Succeeded) { _logger.LogInformation(1, "User logged in."); return RedirectToLocal(returnUrl); } if (result.RequiresTwoFactor) { return RedirectToAction(nameof(SendCode), new {ReturnUrl = returnUrl, RememberMe = model.RememberMe}); } if (result.IsLockedOut) { _logger.LogWarning(2, "User account locked out."); return View("Lockout"); } else { ModelState.AddModelError(string.Empty, "Invalid login attempt."); return View(model); } } // If we got this far, something failed, redisplay form return View(model); } // // GET: /Account/Register [HttpGet] [AllowAnonymous] public IActionResult Register(string returnUrl = null) { ViewData["ReturnUrl"] = returnUrl; return View(); } // // POST: /Account/Register [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public async Task<IActionResult> Register(RegisterViewModel model, string returnUrl = null) { ViewData["ReturnUrl"] = returnUrl; if (ModelState.IsValid) { var user = new ApplicationUser {UserName = model.Email, Email = model.Email}; var result = await _userManager.CreateAsync(user, model.Password); if (result.Succeeded) { // For more information on how to enable account confirmation and password reset please visit https://go.microsoft.com/fwlink/?LinkID=532713 // Send an email with this link //var code = await _userManager.GenerateEmailConfirmationTokenAsync(user); //var callbackUrl = Url.Action("ConfirmEmail", "Account", new { userId = user.Id, code = code }, protocol: HttpContext.Request.Scheme); //await _emailSender.SendEmailAsync(model.Email, "Confirm your account", // $"Please confirm your account by clicking this link: <a href='{callbackUrl}'>link</a>"); await _signInManager.SignInAsync(user, isPersistent: false); _logger.LogInformation(3, "User created a new account with password."); 
return RedirectToLocal(returnUrl); } AddErrors(result); } // If we got this far, something failed, redisplay form return View(model); } // // POST: /Account/LogOff [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> LogOff() { await _signInManager.SignOutAsync(); _logger.LogInformation(4, "User logged out."); return RedirectToAction(nameof(HomeController.Index), "Home"); } // // POST: /Account/ExternalLogin [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public IActionResult ExternalLogin(string provider, string returnUrl = null) { // Request a redirect to the external login provider. var redirectUrl = Url.Action("ExternalLoginCallback", "Account", new {ReturnUrl = returnUrl}); var properties = _signInManager.ConfigureExternalAuthenticationProperties(provider, redirectUrl); return Challenge(properties, provider); } // // GET: /Account/ExternalLoginCallback [HttpGet] [AllowAnonymous] public async Task<IActionResult> ExternalLoginCallback(string returnUrl = null, string remoteError = null) { if (remoteError != null) { ModelState.AddModelError(string.Empty, $"Error from external provider: {remoteError}"); return View(nameof(Login)); } var info = await _signInManager.GetExternalLoginInfoAsync(); if (info == null) { return RedirectToAction(nameof(Login)); } // Sign in the user with this external login provider if the user already has a login. var result = await _signInManager.ExternalLoginSignInAsync(info.LoginProvider, info.ProviderKey, isPersistent: false); if (result.Succeeded) { _logger.LogInformation(5, "User logged in with {Name} provider.", info.LoginProvider); return RedirectToLocal(returnUrl); } if (result.RequiresTwoFactor) { return RedirectToAction(nameof(SendCode), new {ReturnUrl = returnUrl}); } if (result.IsLockedOut) { return View("Lockout"); } else { // If the user does not have an account, then ask the user to create an account. ViewData["ReturnUrl"] = returnUrl; ViewData["LoginProvider"] = info.LoginProvider; var email = info.Principal.FindFirstValue(ClaimTypes.Email); return View("ExternalLoginConfirmation", new ExternalLoginConfirmationViewModel {Email = email}); } } // // POST: /Account/ExternalLoginConfirmation [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public async Task<IActionResult> ExternalLoginConfirmation(ExternalLoginConfirmationViewModel model, string returnUrl = null) { if (ModelState.IsValid) { // Get the information about the user from the external login provider var info = await _signInManager.GetExternalLoginInfoAsync(); if (info == null) { return View("ExternalLoginFailure"); } var user = new ApplicationUser {UserName = model.Email, Email = model.Email}; var result = await _userManager.CreateAsync(user); if (result.Succeeded) { result = await _userManager.AddLoginAsync(user, info); if (result.Succeeded) { await _signInManager.SignInAsync(user, isPersistent: false); _logger.LogInformation(6, "User created an account using {Name} provider.", info.LoginProvider); return RedirectToLocal(returnUrl); } } AddErrors(result); } ViewData["ReturnUrl"] = returnUrl; return View(model); } // GET: /Account/ConfirmEmail [HttpGet] [AllowAnonymous] public async Task<IActionResult> ConfirmEmail(string userId, string code) { if (userId == null || code == null) { return View("Error"); } var user = await _userManager.FindByIdAsync(userId); if (user == null) { return View("Error"); } var result = await _userManager.ConfirmEmailAsync(user, code); return View(result.Succeeded ? 
"ConfirmEmail" : "Error"); } // // GET: /Account/ForgotPassword [HttpGet] [AllowAnonymous] public IActionResult ForgotPassword() { return View(); } // // POST: /Account/ForgotPassword [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public async Task<IActionResult> ForgotPassword(ForgotPasswordViewModel model) { if (ModelState.IsValid) { var user = await _userManager.FindByNameAsync(model.Email); if (user == null || !(await _userManager.IsEmailConfirmedAsync(user))) { // Don't reveal that the user does not exist or is not confirmed return View("ForgotPasswordConfirmation"); } // For more information on how to enable account confirmation and password reset please visit https://go.microsoft.com/fwlink/?LinkID=532713 // Send an email with this link //var code = await _userManager.GeneratePasswordResetTokenAsync(user); //var callbackUrl = Url.Action("ResetPassword", "Account", new { userId = user.Id, code = code }, protocol: HttpContext.Request.Scheme); //await _emailSender.SendEmailAsync(model.Email, "Reset Password", // $"Please reset your password by clicking here: <a href='{callbackUrl}'>link</a>"); //return View("ForgotPasswordConfirmation"); } // If we got this far, something failed, redisplay form return View(model); } // // GET: /Account/ForgotPasswordConfirmation [HttpGet] [AllowAnonymous] public IActionResult ForgotPasswordConfirmation() { return View(); } // // GET: /Account/ResetPassword [HttpGet] [AllowAnonymous] public IActionResult ResetPassword(string code = null) { return code == null ? View("Error") : View(); } // // POST: /Account/ResetPassword [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public async Task<IActionResult> ResetPassword(ResetPasswordViewModel model) { if (!ModelState.IsValid) { return View(model); } var user = await _userManager.FindByNameAsync(model.Email); if (user == null) { // Don't reveal that the user does not exist return RedirectToAction(nameof(AccountController.ResetPasswordConfirmation), "Account"); } var result = await _userManager.ResetPasswordAsync(user, model.Code, model.Password); if (result.Succeeded) { return RedirectToAction(nameof(AccountController.ResetPasswordConfirmation), "Account"); } AddErrors(result); return View(); } // // GET: /Account/ResetPasswordConfirmation [HttpGet] [AllowAnonymous] public IActionResult ResetPasswordConfirmation() { return View(); } // // GET: /Account/SendCode [HttpGet] [AllowAnonymous] public async Task<ActionResult> SendCode(string returnUrl = null, bool rememberMe = false) { var user = await _signInManager.GetTwoFactorAuthenticationUserAsync(); if (user == null) { return View("Error"); } var userFactors = await _userManager.GetValidTwoFactorProvidersAsync(user); var factorOptions = userFactors.Select(purpose => new SelectListItem {Text = purpose, Value = purpose}).ToList(); return View(new SendCodeViewModel {Providers = factorOptions, ReturnUrl = returnUrl, RememberMe = rememberMe}); } // // POST: /Account/SendCode [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public async Task<IActionResult> SendCode(SendCodeViewModel model) { if (!ModelState.IsValid) { return View(); } var user = await _signInManager.GetTwoFactorAuthenticationUserAsync(); if (user == null) { return View("Error"); } // Generate the token and send it var code = await _userManager.GenerateTwoFactorTokenAsync(user, model.SelectedProvider); if (string.IsNullOrWhiteSpace(code)) { return View("Error"); } var message = "Your security code is: " + code; if (model.SelectedProvider == "Email") { await 
_emailSender.SendEmailAsync(await _userManager.GetEmailAsync(user), "Security Code", message); } else if (model.SelectedProvider == "Phone") { await _smsSender.SendSmsAsync(await _userManager.GetPhoneNumberAsync(user), message); } return RedirectToAction(nameof(VerifyCode), new {Provider = model.SelectedProvider, ReturnUrl = model.ReturnUrl, RememberMe = model.RememberMe}); } // // GET: /Account/VerifyCode [HttpGet] [AllowAnonymous] public async Task<IActionResult> VerifyCode(string provider, bool rememberMe, string returnUrl = null) { // Require that the user has already logged in via username/password or external login var user = await _signInManager.GetTwoFactorAuthenticationUserAsync(); if (user == null) { return View("Error"); } return View(new VerifyCodeViewModel {Provider = provider, ReturnUrl = returnUrl, RememberMe = rememberMe}); } // // POST: /Account/VerifyCode [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public async Task<IActionResult> VerifyCode(VerifyCodeViewModel model) { if (!ModelState.IsValid) { return View(model); } // The following code protects for brute force attacks against the two factor codes. // If a user enters incorrect codes for a specified amount of time then the user account // will be locked out for a specified amount of time. var result = await _signInManager.TwoFactorSignInAsync(model.Provider, model.Code, model.RememberMe, model.RememberBrowser); if (result.Succeeded) { return RedirectToLocal(model.ReturnUrl); } if (result.IsLockedOut) { _logger.LogWarning(7, "User account locked out."); return View("Lockout"); } else { ModelState.AddModelError(string.Empty, "Invalid code."); return View(model); } } #region Helpers private void AddErrors(IdentityResult result) { foreach (var error in result.Errors) { ModelState.AddModelError(string.Empty, error.Description); } } private Task<ApplicationUser> GetCurrentUserAsync() { return _userManager.GetUserAsync(HttpContext.User); } private IActionResult RedirectToLocal(string returnUrl) { if (Url.IsLocalUrl(returnUrl)) { return Redirect(returnUrl); } else { return RedirectToAction(nameof(HomeController.Index), "Home"); } } #endregion } }
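// A minimal sketch of the service registration the AccountController above depends on
// (UserManager/SignInManager via ASP.NET Core Identity plus the IEmailSender/ISmsSender services).
// ApplicationDbContext, AuthMessageSender and the CoreCraft.Data namespace are the usual
// project-template names and are assumptions here; this solution may use different types.
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using CoreCraft.Data;      // assumed location of ApplicationDbContext
using CoreCraft.Models;
using CoreCraft.Services;

public class StartupSketch
{
    public void ConfigureServices(IServiceCollection services)
    {
        services.AddIdentity<ApplicationUser, IdentityRole>()
            .AddEntityFrameworkStores<ApplicationDbContext>()    // assumed EF-backed user store
            .AddDefaultTokenProviders();                         // needed for the 2FA and reset tokens used above

        services.AddTransient<IEmailSender, AuthMessageSender>(); // hypothetical implementations
        services.AddTransient<ISmsSender, AuthMessageSender>();

        services.AddMvc();
    }
}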
using System; using System.Collections; using System.Collections.Generic; using Castle.MicroKernel; using Castle.Windsor; using Rhino.Mocks; namespace Rhino.Testing.AutoMocking { public class AutoMockingContainer : WindsorContainer, IAutoMockingRepository, IGenericMockingRepository { private readonly IList<Type> _markMissing = new List<Type>(); private readonly MockRepository _mocks; private readonly Dictionary<Type, object> _services = new Dictionary<Type, object>(); private readonly Dictionary<Type, IMockingStrategy> _strategies = new Dictionary<Type, IMockingStrategy>(); private bool _resolveProperties; public AutoMockingContainer(MockRepository mocks) : this(mocks, false) { } public AutoMockingContainer(MockRepository mocks, bool resolveProperties) { _mocks = mocks; _resolveProperties = resolveProperties; } #region IAutoMockingRepository Members public virtual MockRepository MockRepository { get { return _mocks; } } private new object GetService(Type type) { if (_services.ContainsKey(type)) return _services[type]; return null; } #region DefaultMockingStrategy private IMockingStrategy m_DefaultMockingStrategy; /// <summary> /// Gets or sets the default mocking strategy, which will be returned if a <see cref="Type"/> was not explicitly marked via a <see cref="TypeMarker"/>. /// The default is the <see cref="DynamicMockingStrategy"/>, /// which will always be returned if no other was defined. /// </summary> /// <value>The default mocking strategy.</value> public IMockingStrategy DefaultMockingStrategy { get { if (m_DefaultMockingStrategy == null) { m_DefaultMockingStrategy = new DynamicMockingStrategy(this); } return m_DefaultMockingStrategy; } set { m_DefaultMockingStrategy = value; } } #endregion public virtual IMockingStrategy GetMockingStrategy(Type type) { if (_strategies.ContainsKey(type)) { return _strategies[type]; } return DefaultMockingStrategy; } public virtual IMockingStrategy GetMockingStrategy<T>() { return GetMockingStrategy(typeof (T)); } public bool CanResolve(Type type) { return _markMissing.Contains(type) == false; } #endregion /// <summary> /// Initializes this instance. Must be called before you can work with the instance. /// </summary> public void Initialize() { Kernel.AddSubSystem(SubSystemConstants.NamingKey,new AutoMockingNamingSubSystem(this)); Kernel.AddFacility("AutoMockingFacility", new AutoMockingFacility(this)); Kernel.ComponentModelBuilder.AddContributor(new NonPublicConstructorDependenciesModelInspector()); Kernel.ComponentModelCreated += Kernel_ComponentModelCreated; } void Kernel_ComponentModelCreated(Castle.Core.ComponentModel model) { if (model.CustomComponentActivator!=null) return; model.CustomComponentActivator = ResolveProperties ? typeof (AutoMockingComponentActivator) : typeof (NonPropertyResolvingComponentActivator); } private void AddComponentIfMissing<T>() { Type targetType = typeof(T); if (!Kernel.HasComponent(targetType.FullName)) AddComponent(targetType.FullName, targetType); } /// <summary> /// Create an instance of type <typeparamref name="T"/> with its constructor declared /// dependencies resolved as mocks /// </summary> /// <remarks> /// For each constructor dependency that <typeparamref name="T"/> declares, use <see /// cref="Mark"/> to determine the type of mock (stub, dynamic mock, etc) that will be /// injected into the instance. The default is to inject a dynamic mock. /// <para> /// Use <see cref="MarkMissing"/> for a constructor dependency that you do not want the /// container to resolve.
/// </para> /// <para> /// If you want a dependency to be resolved as an explicit type rather than as a mock, use /// <see cref="IWindsorContainer.AddComponent(string,Type,Type)"/> to register the explicit /// type that the container should use before calling this method. /// </para> /// </remarks> public T Create<T>() { AddComponentIfMissing<T>(); return Resolve<T>(); } /// <summary> /// See <see cref="Create{T}()"/> /// </summary> public T Create<T>(IDictionary parameters) { AddComponentIfMissing<T>(); return Resolve<T>(parameters); } /// <summary> /// Returns a mock of the specified <paramref name="type"/> /// </summary> /// <remarks> /// Use <see cref="Mark"/> to determine the type of mock (stub, dynamic mock, etc) that will be /// returned. A dynamic mock will be returned by default. /// <para> /// Only a single instance of the specified <paramref name="type"/> will be created and returned /// </para> /// </remarks> public object Get(Type type) { if (type == typeof(IKernel)) return Kernel; object t = GetService(type); if (t != null) return t; object instance = GetMockingStrategy(type).Create(CreationContext.Empty, type); AddService(type, instance); return instance; } /// <summary> /// See <see cref="Get"/> /// </summary> public T Get<T>() where T : class { return (T) Get(typeof (T)); } public void SetMockingStrategy(Type type, IMockingStrategy strategy) { _strategies[type] = strategy; } public void SetMockingStrategy<T>(IMockingStrategy strategy) { SetMockingStrategy(typeof(T), strategy); } public void AddService(Type type, object service) { _services[type] = service; } public void AddService<T>(T service) { AddService(typeof (T), service); } /// <summary> /// See <see cref="IAutoMockingRepository.Mark"/> /// </summary> public TypeMarker Mark(Type type) { return new TypeMarker(type, this); } public bool CanResolveFromMockRepository(Type service) { return _markMissing.Contains(service) == false && GetMockingStrategy(service).GetType() != typeof (NonMockedStrategy); } /// <summary> /// See <see cref="IAutoMockingRepository.Mark"/> /// </summary> public TypeMarker Mark<T>() { return Mark(typeof (T)); } /// <summary> /// See <see cref="MarkMissing"/> /// </summary> public void MarkMissing<T>() { MarkMissing(typeof (T)); } /// <summary> /// Indicate that instances of <paramref name="type"/> should not be resolved by the container /// <seealso cref="Create{T}()"/> /// </summary> public void MarkMissing(Type type) { _markMissing.Add(type); } public bool ResolveProperties { get { return _resolveProperties; } set { _resolveProperties = value; } } } }
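// A minimal usage sketch for the AutoMockingContainer above. OrderProcessor and IOrderRepository
// are hypothetical application types; only members defined in this file (the constructor,
// Initialize, Create<T> and Get<T>) are used, so the constructor dependency is resolved with the
// DefaultMockingStrategy (a dynamic mock).
using Rhino.Mocks;
using Rhino.Testing.AutoMocking;

public interface IOrderRepository { void Save(string orderId); }

public class OrderProcessor
{
    private readonly IOrderRepository _repository;
    public OrderProcessor(IOrderRepository repository) { _repository = repository; }
    public void Process(string orderId) { _repository.Save(orderId); }
}

public static class AutoMockingContainerExample
{
    public static void Run()
    {
        var mocks = new MockRepository();
        var container = new AutoMockingContainer(mocks);
        container.Initialize();                               // must be called before resolving anything

        var repository = container.Get<IOrderRepository>();   // dynamic mock, cached via AddService
        var processor = container.Create<OrderProcessor>();   // IOrderRepository dependency injected as a mock

        mocks.ReplayAll();
        processor.Process("42");
        mocks.VerifyAll();
    }
}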
#region license // Copyright (c) 2004, Rodrigo B. de Oliveira ([email protected]) // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // * Neither the name of Rodrigo B. de Oliveira nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF // THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #endregion using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Reflection; using System.Text.RegularExpressions; using Boo.Lang.Compiler.Ast; using Boo.Lang.Compiler.Util; using Boo.Lang.Compiler.TypeSystem; using Boo.Lang.Compiler.TypeSystem.Reflection; using Boo.Lang.Environments; using Boo.Lang.Resources; namespace Boo.Lang.Compiler { /// <summary> /// Compiler parameters. 
/// </summary> public class CompilerParameters { public static IReflectionTypeSystemProvider SharedTypeSystemProvider = new ReflectionTypeSystemProvider(); private TextWriter _outputWriter; private readonly CompilerInputCollection _input; private readonly CompilerResourceCollection _resources; private CompilerReferenceCollection _compilerReferences; private string _outputAssembly; private bool _strict; private readonly List<string> _libPaths; private readonly string _systemDir; private Assembly _booAssembly; private readonly Dictionary<string, string> _defines = new Dictionary<string, string>(StringComparer.Ordinal); private TypeMemberModifiers _defaultTypeVisibility = TypeMemberModifiers.Public; private TypeMemberModifiers _defaultMethodVisibility = TypeMemberModifiers.Public; private TypeMemberModifiers _defaultPropertyVisibility = TypeMemberModifiers.Public; private TypeMemberModifiers _defaultEventVisibility = TypeMemberModifiers.Public; private TypeMemberModifiers _defaultFieldVisibility = TypeMemberModifiers.Protected; private bool _defaultVisibilitySettingsRead; public CompilerParameters() : this(true) { } public CompilerParameters(bool loadDefaultReferences) : this(SharedTypeSystemProvider, loadDefaultReferences) { } public CompilerParameters(IReflectionTypeSystemProvider reflectionProvider) : this(reflectionProvider, true) { } public CompilerParameters(IReflectionTypeSystemProvider reflectionProvider, bool loadDefaultReferences) { _libPaths = new List<string>(); _systemDir = Permissions.WithDiscoveryPermission(() => GetSystemDir()); if (_systemDir != null) { _libPaths.Add(_systemDir); _libPaths.Add(Directory.GetCurrentDirectory()); } _input = new CompilerInputCollection(); _resources = new CompilerResourceCollection(); _compilerReferences = new CompilerReferenceCollection(reflectionProvider); MaxExpansionIterations = 12; _outputAssembly = String.Empty; OutputType = CompilerOutputType.Auto; _outputWriter = Console.Out; Debug = true; Checked = true; GenerateInMemory = true; StdLib = true; DelaySign = false; Strict = false; TraceLevel = DefaultTraceLevel(); if (loadDefaultReferences) LoadDefaultReferences(); } private static TraceLevel DefaultTraceLevel() { var booTraceLevel = Permissions.WithEnvironmentPermission(() => System.Environment.GetEnvironmentVariable("BOO_TRACE_LEVEL")); return string.IsNullOrEmpty(booTraceLevel) ? TraceLevel.Off : (TraceLevel)Enum.Parse(typeof(TraceLevel), booTraceLevel); } public void LoadDefaultReferences() { //boo.lang.dll _booAssembly = typeof(Builtins).Assembly; _compilerReferences.Add(_booAssembly); //boo.lang.extensions.dll //try loading extensions next to Boo.Lang (in the same directory) var extensionsAssembly = TryToLoadExtensionsAssembly(); if (extensionsAssembly != null) _compilerReferences.Add(extensionsAssembly); //mscorlib _compilerReferences.Add(LoadAssembly("mscorlib", true)); //System _compilerReferences.Add(LoadAssembly("System", true)); //System.Core _compilerReferences.Add(LoadAssembly("System.Core", true)); Permissions.WithDiscoveryPermission<object>(() => { WriteTraceInfo("BOO LANG DLL: " + _booAssembly.Location); WriteTraceInfo("BOO COMPILER EXTENSIONS DLL: " + (extensionsAssembly != null ? extensionsAssembly.ToString() : "NOT FOUND!")); return null; }); } private IAssemblyReference TryToLoadExtensionsAssembly() { const string booLangExtensionsDll = "Boo.Lang.Extensions.dll"; var tentative = Permissions.WithDiscoveryPermission(() => Path.Combine(Path.GetDirectoryName(_booAssembly.Location), booLangExtensionsDll)) ?? 
booLangExtensionsDll; return LoadAssembly(tentative, false) ?? LoadAssembly("Boo.Lang.Extensions", false); } public Assembly BooAssembly { get { return _booAssembly; } set { if (null == value) throw new ArgumentNullException("value"); if (value != _booAssembly) { _compilerReferences.Remove(_booAssembly); _booAssembly = value; _compilerReferences.Add(value); } } } public ICompileUnit FindAssembly(string name) { return _compilerReferences.Find(name); } public void AddAssembly(Assembly asm) { if (null == asm) throw new ArgumentNullException(); _compilerReferences.Add(asm); } public IAssemblyReference LoadAssembly(string assembly) { return LoadAssembly(assembly, true); } public IAssemblyReference LoadAssembly(string assemblyName, bool throwOnError) { var assembly = ForName(assemblyName, throwOnError); if (null == assembly) return null; return _compilerReferences.Provider.ForAssembly(assembly); } private Assembly ForName(string assembly, bool throwOnError) { Assembly a = null; try { if (assembly.IndexOfAny(new char[] {'/', '\\'}) != -1) { //nant passes full path to gac dlls, which compiler doesn't like: //if (assembly.ToLower().StartsWith(_systemDir.ToLower())) { //return LoadAssemblyFromGac(Path.GetFileName(assembly)); } //else //load using path { a = Assembly.LoadFrom(assembly); } } else { a = LoadAssemblyFromGac(assembly); } } catch (FileNotFoundException /*ignored*/) { return LoadAssemblyFromLibPaths(assembly, throwOnError); } catch (BadImageFormatException e) { if (throwOnError) throw new ApplicationException(string.Format(Boo.Lang.Resources.StringResources.BooC_BadFormat, e.FusionLog), e); } catch (FileLoadException e) { if (throwOnError) throw new ApplicationException(string.Format(Boo.Lang.Resources.StringResources.BooC_UnableToLoadAssembly, e.FusionLog), e); } catch (ArgumentNullException e) { if (throwOnError) throw new ApplicationException(Boo.Lang.Resources.StringResources.BooC_NullAssembly, e); } return a ?? LoadAssemblyFromLibPaths(assembly, false); } private Assembly LoadAssemblyFromLibPaths(string assembly, bool throwOnError) { Assembly a = null; string fullLog = ""; foreach (string dir in _libPaths) { string full_path = Path.Combine(dir, assembly); FileInfo file = new FileInfo(full_path); if (!IsAssemblyExtension(file.Extension)) full_path += ".dll"; try { a = Assembly.LoadFrom(full_path); if (a != null) { return a; } } catch (FileNotFoundException ff) { fullLog += ff.FusionLog; continue; } } if (throwOnError) { throw new ApplicationException(string.Format(Boo.Lang.Resources.StringResources.BooC_CannotFindAssembly, assembly)); //assembly, total_log)); //total_log contains the fusion log } return a; } private static bool IsAssemblyExtension(string extension) { switch (extension.ToLower()) { case ".dll": case ".exe": return true; } return false; } private static Assembly LoadAssemblyFromGac(string assemblyName) { assemblyName = NormalizeAssemblyName(assemblyName); // This is an intentional attempt to load an assembly with partial name // so ignore the compiler warning #pragma warning disable 618 var assembly = Permissions.WithDiscoveryPermission(()=> Assembly.LoadWithPartialName(assemblyName)); #pragma warning restore 618 return assembly ?? 
Assembly.Load(assemblyName); } private static string NormalizeAssemblyName(string assembly) { var extension = Path.GetExtension(assembly).ToLower(); if (extension == ".dll" || extension == ".exe") return assembly.Substring(0, assembly.Length - 4); return assembly; } public void LoadReferencesFromPackage(string package) { string[] libs = Regex.Split(pkgconfig(package), @"\-r\:", RegexOptions.CultureInvariant); foreach (string r in libs) { string reference = r.Trim(); if (reference.Length == 0) continue; WriteTraceInfo("LOADING REFERENCE FROM PKGCONFIG '" + package + "' : " + reference); References.Add(LoadAssembly(reference)); } } [Conditional("TRACE")] private void WriteTraceInfo(string message) { if (TraceInfo) Console.Error.WriteLine(message); } private static string pkgconfig(string package) { #if NO_SYSTEM_DLL throw new System.NotSupportedException(); #else Process process; try { process = Builtins.shellp("pkg-config", String.Format("--libs {0}", package)); } catch (Exception e) { throw new ApplicationException(StringResources.BooC_PkgConfigNotFound, e); } process.WaitForExit(); if (process.ExitCode != 0) { throw new ApplicationException(string.Format(StringResources.BooC_PkgConfigReportedErrors, process.StandardError.ReadToEnd())); } return process.StandardOutput.ReadToEnd(); #endif } private static string GetSystemDir() { return Path.GetDirectoryName(typeof(string).Assembly.Location); } /// <summary> /// Max number of iterations for the application of AST attributes and the /// expansion of macros. /// </summary> public int MaxExpansionIterations { get; set; } public CompilerInputCollection Input { get { return _input; } } public List<string> LibPaths { get { return _libPaths; } } public CompilerResourceCollection Resources { get { return _resources; } } public CompilerReferenceCollection References { get { return _compilerReferences; } set { if (null == value) throw new ArgumentNullException("References"); _compilerReferences = value; } } /// <summary> /// The compilation pipeline. /// </summary> public CompilerPipeline Pipeline { get; set; } /// <summary> /// The name (full or partial) for the file /// that should receive the resulting assembly. /// </summary> public string OutputAssembly { get { return _outputAssembly; } set { if (String.IsNullOrEmpty(value)) throw new ArgumentNullException("OutputAssembly"); _outputAssembly = value; } } /// <summary> /// Type and execution subsystem for the generated portable /// executable file. 
/// </summary> public CompilerOutputType OutputType { get; set; } public bool GenerateInMemory { get; set; } public bool StdLib { get; set; } public TextWriter OutputWriter { get { return _outputWriter; } set { if (null == value) throw new ArgumentNullException("OutputWriter"); _outputWriter = value; } } public bool Debug { get; set; } /// <summary> /// Treat System.Object as duck /// </summary> public virtual bool Ducky { get; set; } public bool Checked { get; set; } public string KeyFile { get; set; } public string KeyContainer { get; set; } public bool DelaySign { get; set; } public bool WhiteSpaceAgnostic { get; set; } public Dictionary<string, string> Defines { get { return _defines; } } public TypeMemberModifiers DefaultTypeVisibility { get { if (!_defaultVisibilitySettingsRead) ReadDefaultVisibilitySettings(); return _defaultTypeVisibility; } set { _defaultTypeVisibility = value & TypeMemberModifiers.VisibilityMask; } } public TypeMemberModifiers DefaultMethodVisibility { get { if (!_defaultVisibilitySettingsRead) ReadDefaultVisibilitySettings(); return _defaultMethodVisibility; } set { _defaultMethodVisibility = value & TypeMemberModifiers.VisibilityMask; } } public TypeMemberModifiers DefaultPropertyVisibility { get { if (!_defaultVisibilitySettingsRead) ReadDefaultVisibilitySettings(); return _defaultPropertyVisibility; } set { _defaultPropertyVisibility = value & TypeMemberModifiers.VisibilityMask; } } public TypeMemberModifiers DefaultEventVisibility { get { if (!_defaultVisibilitySettingsRead) ReadDefaultVisibilitySettings(); return _defaultEventVisibility; } set { _defaultEventVisibility = value & TypeMemberModifiers.VisibilityMask; } } public TypeMemberModifiers DefaultFieldVisibility { get { if (!_defaultVisibilitySettingsRead) ReadDefaultVisibilitySettings(); return _defaultFieldVisibility; } set { _defaultFieldVisibility = value & TypeMemberModifiers.VisibilityMask; } } public bool TraceInfo { get { return TraceLevel >= TraceLevel.Info; } } public bool TraceWarning { get { return TraceLevel >= TraceLevel.Warning; } } public bool TraceError { get { return TraceLevel >= TraceLevel.Error; } } public bool TraceVerbose { get { return TraceLevel >= TraceLevel.Verbose; } } public TraceLevel TraceLevel { get; set; } private void ReadDefaultVisibilitySettings() { string visibility; if (_defines.TryGetValue("DEFAULT_TYPE_VISIBILITY", out visibility)) DefaultTypeVisibility = ParseVisibility(visibility); if (_defines.TryGetValue("DEFAULT_METHOD_VISIBILITY", out visibility)) DefaultMethodVisibility = ParseVisibility(visibility); if (_defines.TryGetValue("DEFAULT_PROPERTY_VISIBILITY", out visibility)) DefaultPropertyVisibility = ParseVisibility(visibility); if (_defines.TryGetValue("DEFAULT_EVENT_VISIBILITY", out visibility)) DefaultEventVisibility = ParseVisibility(visibility); if (_defines.TryGetValue("DEFAULT_FIELD_VISIBILITY", out visibility)) DefaultFieldVisibility = ParseVisibility(visibility); _defaultVisibilitySettingsRead = true; } private static TypeMemberModifiers ParseVisibility(string visibility) { if (String.IsNullOrEmpty(visibility)) throw new ArgumentNullException("visibility"); visibility = visibility.ToLower(); switch (visibility) { case "public": return TypeMemberModifiers.Public; case "protected": return TypeMemberModifiers.Protected; case "internal": return TypeMemberModifiers.Internal; case "private": return TypeMemberModifiers.Private; } throw new ArgumentException("visibility", String.Format("Invalid visibility: '{0}'", visibility)); } Util.Set<string> 
_disabledWarnings = new Util.Set<string>(); Util.Set<string> _promotedWarnings = new Util.Set<string>(); public bool NoWarn { get; set; } public bool WarnAsError { get; set; } public ICollection<string> DisabledWarnings { get { return _disabledWarnings; } } public ICollection<string> WarningsAsErrors { get { return _promotedWarnings; } } public void EnableWarning(string code) { if (_disabledWarnings.Contains(code)) _disabledWarnings.Remove(code); } public void DisableWarning(string code) { _disabledWarnings.Add(code); } public void ResetWarnings() { NoWarn = false; _disabledWarnings.Clear(); Strict = _strict; } public void EnableWarningAsError(string code) { _promotedWarnings.Add(code); } public void DisableWarningAsError(string code) { if (_promotedWarnings.Contains(code)) _promotedWarnings.Remove(code); } public void ResetWarningsAsErrors() { WarnAsError = false; _promotedWarnings.Clear(); } public bool Strict { get { return _strict; } set { _strict = value; if (_strict) OnStrictMode(); else OnNonStrictMode(); } } protected virtual void OnNonStrictMode() { _defaultTypeVisibility = TypeMemberModifiers.Public; _defaultMethodVisibility = TypeMemberModifiers.Public; _defaultPropertyVisibility = TypeMemberModifiers.Public; _defaultEventVisibility = TypeMemberModifiers.Public; _defaultFieldVisibility = TypeMemberModifiers.Protected; DisableWarning(CompilerWarningFactory.Codes.ImplicitReturn); DisableWarning(CompilerWarningFactory.Codes.VisibleMemberDoesNotDeclareTypeExplicitely); DisableWarning(CompilerWarningFactory.Codes.ImplicitDowncast); } protected virtual void OnStrictMode() { _defaultTypeVisibility = TypeMemberModifiers.Private; _defaultMethodVisibility = TypeMemberModifiers.Private; _defaultPropertyVisibility = TypeMemberModifiers.Private; _defaultEventVisibility = TypeMemberModifiers.Private; _defaultFieldVisibility = TypeMemberModifiers.Private; EnableWarning(CompilerWarningFactory.Codes.ImplicitReturn); EnableWarning(CompilerWarningFactory.Codes.VisibleMemberDoesNotDeclareTypeExplicitely); //by default strict mode forbids implicit downcasts //disable warning so we get only the regular incompatible type error DisableWarning(CompilerWarningFactory.Codes.ImplicitDowncast); } public bool Unsafe { get; set; } public string Platform { get; set; } public IEnvironment Environment { get; set; } } }
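// ---------------------------------------------------------------------------
// Usage sketch for the compiler-parameters class above (assumed to be Boo's
// CompilerParameters). BooCompiler, StringInput and CompileToMemory are the
// usual Boo.Lang.Compiler entry points but are not defined in this file, so
// treat those names as assumptions and verify them against the Boo version in
// use; the warning codes below are placeholders.
// ---------------------------------------------------------------------------
using Boo.Lang.Compiler;
using Boo.Lang.Compiler.IO;
using Boo.Lang.Compiler.Pipelines;

internal static class CompilerParametersUsageSketch
{
    public static void Run()
    {
        var compiler = new BooCompiler();
        CompilerParameters parameters = compiler.Parameters;

        // Feed one in-memory source file into the Input collection.
        parameters.Input.Add(new StringInput("hello.boo", "print 'hello'"));

        // Strict mode switches the default member visibilities to private and
        // re-enables the implicit-return / explicit-typing warnings (see OnStrictMode above).
        parameters.Strict = true;
        parameters.Ducky = false;

        // Conditional-compilation symbols consumed by ReadDefaultVisibilitySettings.
        parameters.Defines["DEFAULT_METHOD_VISIBILITY"] = "internal";

        // Individual warnings can be suppressed or promoted to errors.
        parameters.DisabledWarnings.Add("BCW0003");   // placeholder warning code
        parameters.WarningsAsErrors.Add("BCW0015");   // placeholder warning code

        parameters.OutputAssembly = "hello.dll";
        parameters.Pipeline = new CompileToMemory();

        CompilerContext context = compiler.Run();
        foreach (var error in context.Errors)
            System.Console.Error.WriteLine(error);
    }
}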
#region Apache Notice /***************************************************************************** * $Revision: 408164 $ * $LastChangedDate: 2006-05-21 14:27:09 +0200 (dim., 21 mai 2006) $ * $LastChangedBy: gbayon $ * * iBATIS.NET Data Mapper * Copyright (C) 2006/2005 - The Apache Software Foundation * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ********************************************************************************/ #endregion #region Imports using System; using System.Collections; using System.Collections.Specialized; #endregion namespace IBatisNet.Common.Utilities.TypesResolver { /// <summary> /// Provides access to a central registry of aliased <see cref="System.Type"/>s. /// </summary> /// <remarks> /// <p> /// Simplifies configuration by allowing aliases to be used instead of /// fully qualified type names. /// </p> /// <p> /// Comes 'pre-loaded' with a number of convenience alias' for the more /// common types; an example would be the '<c>int</c>' (or '<c>Integer</c>' /// for Visual Basic.NET developers) alias for the <see cref="System.Int32"/> /// type. /// </p> /// </remarks> public class TypeRegistry { #region Constants /// <summary> /// The alias around the 'list' type. /// </summary> public const string ArrayListAlias1 = "arraylist"; /// <summary> /// Another alias around the 'list' type. /// </summary> public const string ArrayListAlias2 = "list"; /// <summary> /// Another alias around the 'bool' type. /// </summary> public const string BoolAlias = "bool"; /// <summary> /// The alias around the 'bool' type. /// </summary> public const string BooleanAlias = "boolean"; /// <summary> /// The alias around the 'byte' type. /// </summary> public const string ByteAlias = "byte"; /// <summary> /// The alias around the 'char' type. /// </summary> public const string CharAlias = "char"; /// <summary> /// The alias around the 'DateTime' type. /// </summary> public const string DateAlias1 = "datetime"; /// <summary> /// Another alias around the 'DateTime' type. /// </summary> public const string DateAlias2 = "date"; /// <summary> /// The alias around the 'decimal' type. /// </summary> public const string DecimalAlias = "decimal"; /// <summary> /// The alias around the 'double' type. /// </summary> public const string DoubleAlias = "double"; /// <summary> /// The alias around the 'float' type. /// </summary> public const string FloatAlias = "float"; /// <summary> /// Another alias around the 'float' type. /// </summary> public const string SingleAlias = "single"; /// <summary> /// The alias around the 'guid' type. /// </summary> public const string GuidAlias = "guid"; /// <summary> /// The alias around the 'Hashtable' type. /// </summary> public const string HashtableAlias1 = "hashtable"; /// <summary> /// Another alias around the 'Hashtable' type. /// </summary> public const string HashtableAlias2 = "map"; /// <summary> /// Another alias around the 'Hashtable' type. /// </summary> public const string HashtableAlias3 = "hashmap"; /// <summary> /// The alias around the 'short' type. 
/// </summary> public const string Int16Alias1 = "int16"; /// <summary> /// Another alias around the 'short' type. /// </summary> public const string Int16Alias2 = "short"; /// <summary> /// The alias around the 'int' type. /// </summary> public const string Int32Alias1 = "int32"; /// <summary> /// Another alias around the 'int' type. /// </summary> public const string Int32Alias2 = "int"; /// <summary> /// Another alias around the 'int' type. /// </summary> public const string Int32Alias3 = "integer"; /// <summary> /// The alias around the 'long' type. /// </summary> public const string Int64Alias1 = "int64"; /// <summary> /// Another alias around the 'long' type. /// </summary> public const string Int64Alias2 = "long"; /// <summary> /// The alias around the 'unsigned short' type. /// </summary> public const string UInt16Alias1 = "uint16"; /// <summary> /// Another alias around the 'unsigned short' type. /// </summary> public const string UInt16Alias2 = "ushort"; /// <summary> /// The alias around the 'unsigned int' type. /// </summary> public const string UInt32Alias1 = "uint32"; /// <summary> /// Another alias around the 'unsigned int' type. /// </summary> public const string UInt32Alias2 = "uint"; /// <summary> /// The alias around the 'unsigned long' type. /// </summary> public const string UInt64Alias1 = "uint64"; /// <summary> /// Another alias around the 'unsigned long' type. /// </summary> public const string UInt64Alias2 = "ulong"; /// <summary> /// The alias around the 'SByte' type. /// </summary> public const string SByteAlias = "sbyte"; /// <summary> /// The alias around the 'string' type. /// </summary> public const string StringAlias = "string"; /// <summary> /// The alias around the 'TimeSpan' type. /// </summary> public const string TimeSpanAlias = "timespan"; //#if dotnet2 /// <summary> /// The alias around the 'int?' type. /// </summary> public const string NullableInt32Alias = "int?"; /// <summary> /// The alias around the 'int?[]' array type. /// </summary> public const string NullableInt32ArrayAlias = "int?[]"; /// <summary> /// The alias around the 'decimal?' type. /// </summary> public const string NullableDecimalAlias = "decimal?"; /// <summary> /// The alias around the 'decimal?[]' array type. /// </summary> public const string NullableDecimalArrayAlias = "decimal?[]"; /// <summary> /// The alias around the 'char?' type. /// </summary> public const string NullableCharAlias = "char?"; /// <summary> /// The alias around the 'char?[]' array type. /// </summary> public const string NullableCharArrayAlias = "char?[]"; /// <summary> /// The alias around the 'long?' type. /// </summary> public const string NullableInt64Alias = "long?"; /// <summary> /// The alias around the 'long?[]' array type. /// </summary> public const string NullableInt64ArrayAlias = "long?[]"; /// <summary> /// The alias around the 'short?' type. /// </summary> public const string NullableInt16Alias = "short?"; /// <summary> /// The alias around the 'short?[]' array type. /// </summary> public const string NullableInt16ArrayAlias = "short?[]"; /// <summary> /// The alias around the 'unsigned int?' type. /// </summary> public const string NullableUInt32Alias = "uint?"; /// <summary> /// The alias around the 'unsigned long?' type. /// </summary> public const string NullableUInt64Alias = "ulong?"; /// <summary> /// The alias around the 'ulong?[]' array type. /// </summary> public const string NullableUInt64ArrayAlias = "ulong?[]"; /// <summary> /// The alias around the 'uint?[]' array type. 
/// </summary> public const string NullableUInt32ArrayAlias = "uint?[]"; /// <summary> /// The alias around the 'unsigned short?' type. /// </summary> public const string NullableUInt16Alias = "ushort?"; /// <summary> /// The alias around the 'ushort?[]' array type. /// </summary> public const string NullableUInt16ArrayAlias = "ushort?[]"; /// <summary> /// The alias around the 'double?' type. /// </summary> public const string NullableDoubleAlias = "double?"; /// <summary> /// The alias around the 'double?[]' array type. /// </summary> public const string NullableDoubleArrayAlias = "double?[]"; /// <summary> /// The alias around the 'float?' type. /// </summary> public const string NullableFloatAlias = "float?"; /// <summary> /// The alias around the 'float?[]' array type. /// </summary> public const string NullableFloatArrayAlias = "float?[]"; /// <summary> /// The alias around the 'bool?' type. /// </summary> public const string NullableBoolAlias = "bool?"; /// <summary> /// The alias around the 'bool?[]' array type. /// </summary> public const string NullableBoolArrayAlias = "bool?[]"; //#endif #endregion #region Fields private static IDictionary _types = new Hashtable(); #endregion #region Constructor (s) / Destructor /// <summary> /// Creates a new instance of the <see cref="TypeRegistry"/> class. /// </summary> /// <remarks> /// <p> /// This is a utility class, and as such has no publicly visible /// constructors. /// </p> /// </remarks> private TypeRegistry() { } /// <summary> /// Initialises the static properties of the TypeRegistry class. /// </summary> static TypeRegistry() { // Initialize the dictionary with the fully qualified types for the well-known aliases _types[ArrayListAlias1] = typeof(ArrayList); _types[ArrayListAlias2] = typeof(ArrayList); _types[BoolAlias] = typeof(bool); _types[BooleanAlias] = typeof(bool); _types[ByteAlias] = typeof(byte); _types[CharAlias] = typeof(char); _types[DateAlias1] = typeof(DateTime); _types[DateAlias2] = typeof(DateTime); _types[DecimalAlias] = typeof(decimal); _types[DoubleAlias] = typeof(double); _types[FloatAlias] = typeof(float); _types[SingleAlias] = typeof(float); _types[GuidAlias] = typeof(Guid); _types[HashtableAlias1] = typeof(Hashtable); _types[HashtableAlias2] = typeof(Hashtable); _types[HashtableAlias3] = typeof(Hashtable); _types[Int16Alias1] = typeof(short); _types[Int16Alias2] = typeof(short); _types[Int32Alias1] = typeof(int); _types[Int32Alias2] = typeof(int); _types[Int32Alias3] = typeof(int); _types[Int64Alias1] = typeof(long); _types[Int64Alias2] = typeof(long); _types[UInt16Alias1] = typeof(ushort); _types[UInt16Alias2] = typeof(ushort); _types[UInt32Alias1] = typeof(uint); _types[UInt32Alias2] = typeof(uint); _types[UInt64Alias1] = typeof(ulong); _types[UInt64Alias2] = typeof(ulong); _types[SByteAlias] = typeof(sbyte); _types[StringAlias] = typeof(string); _types[TimeSpanAlias] = typeof(TimeSpan); //#if dotnet2 _types[NullableInt32Alias] = typeof(int?); _types[NullableInt32ArrayAlias] = typeof(int?[]); _types[NullableDecimalAlias] = typeof(decimal?); _types[NullableDecimalArrayAlias] = typeof(decimal?[]); _types[NullableCharAlias] = typeof(char?); _types[NullableCharArrayAlias] = typeof(char?[]); _types[NullableInt64Alias] = typeof(long?); _types[NullableInt64ArrayAlias] = typeof(long?[]); _types[NullableInt16Alias] = typeof(short?); _types[NullableInt16ArrayAlias] = typeof(short?[]); _types[NullableUInt32Alias] = typeof(uint?); _types[NullableUInt32ArrayAlias] = typeof(uint?[]); _types[NullableUInt64Alias] = typeof(ulong?);
_types[NullableUInt64ArrayAlias] = typeof(ulong?[]); _types[NullableUInt16Alias] = typeof(ushort?); _types[NullableUInt16ArrayAlias] = typeof(ushort?[]); _types[NullableDoubleAlias] = typeof(double?); _types[NullableDoubleArrayAlias] = typeof(double?[]); _types[NullableFloatAlias] = typeof(float?); _types[NullableFloatArrayAlias] = typeof(float?[]); _types[NullableBoolAlias] = typeof(bool?); _types[NullableBoolArrayAlias] = typeof(bool?[]); //#endif } #endregion #region Methods /// <summary> /// Resolves the supplied <paramref name="alias"/> to a <see cref="System.Type"/>. /// </summary> /// <param name="alias"> /// The alias to resolve. /// </param> /// <returns> /// The <see cref="System.Type"/> the supplied <paramref name="alias"/> was /// associated with, or <see langword="null"/> if no <see cref="System.Type"/> /// was previously registered for the supplied <paramref name="alias"/>. /// </returns> /// <remarks>The alias name is converted to lower case before resolution.</remarks> /// <exception cref="System.ArgumentNullException"> /// If the supplied <paramref name="alias"/> is <see langword="null"/> or /// contains only whitespace character(s). /// </exception> public static Type ResolveType(string alias) { if (alias == null || alias.Trim().Length == 0) { throw new ArgumentNullException("alias", "The alias to resolve must not be null or empty."); } return (Type)_types[alias.ToLower()]; } #endregion } }
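// ---------------------------------------------------------------------------
// Usage sketch for the alias registry above: resolving a few built-in aliases
// to their CLR types. Only ResolveType and the public alias constants from
// TypeRegistry are used; the wrapper class here is illustrative only.
// ---------------------------------------------------------------------------
using System;
using IBatisNet.Common.Utilities.TypesResolver;

internal static class TypeRegistryUsageSketch
{
    public static void Run()
    {
        // Aliases are case-insensitive because ResolveType lower-cases its argument.
        Type intType = TypeRegistry.ResolveType("Integer");                    // typeof(int)
        Type mapType = TypeRegistry.ResolveType(TypeRegistry.HashtableAlias2); // typeof(Hashtable)
        Type nullableLong = TypeRegistry.ResolveType("long?");                 // typeof(long?)

        // Unregistered aliases are not an error; the lookup simply yields null.
        Type unknown = TypeRegistry.ResolveType("no-such-alias");

        Console.WriteLine("{0}, {1}, {2}, unknown is null: {3}",
            intType, mapType, nullableLong, unknown == null);
    }
}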
/* * Copyright (c) 2015, InWorldz Halcyon Developers * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of halcyon nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using OpenMetaverse; using System.IO; using Amib.Threading; using OpenSim.Framework; using log4net; using System.Reflection; using System.Threading; namespace InWorldz.Data.Assets.Stratus.Cache { /// <summary> /// A writeback cache used to suppliment cloud files when writing is slow /// </summary> internal class DiskWriteBackCache { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); /// <summary> /// How long to wait on pending writes during a shutdown /// </summary> private const int SHUTDOWN_WAIT_TIMEOUT = 5 * 1000; /// <summary> /// The directory where we store the files that we're trying to send to CF /// </summary> private const string WRITEBACK_CACHE_DIR = "./cache/cf_writeback"; /// <summary> /// Timeout before we delete assets that have recently been written to CF (see _recentlyWritten) /// </summary> private const ulong RECENT_WRITE_TIMEOUT = 60 * 1000; /// <summary> /// The number of worker threads we'll use to write the waiting assets to CF /// </summary> private const int NUM_WRITE_WORKERS = 4; /// <summary> /// How long to sleep between trying to write assets /// </summary> private const int WRITE_TIMER_PERIOD = 1000; /// <summary> /// In memory list of IDs that are currently stored in the writeback cache /// </summary> private C5.HashedLinkedList<Guid> _ids = new C5.HashedLinkedList<Guid>(); /// <summary> /// Assets that were recently written to disk. This helps us to overcome a timing window whereby /// CF would report an asset missing while we were writing that asset to disk. Then, by the time /// the caller checks us, we would report the asset missing as well if we were able to write it to /// CF. Instead, this list can be checked by a process that can clean up the written assets after /// a timeout, getting rid of the timing window. 
/// </summary> private Dictionary<Guid, ulong> _recentlyWritten = new Dictionary<Guid, ulong>(); /// <summary> /// Lock taken during any operation /// </summary> private object _oplock = new object(); /// <summary> /// Write workers for copying our local assets to cloud files /// </summary> private CloudFilesAssetWorker[] _workers = new CloudFilesAssetWorker[NUM_WRITE_WORKERS]; /// <summary> /// Threadpool to cover our write backs /// </summary> private SmartThreadPool _writeBackPool = new SmartThreadPool(30 * 1000, NUM_WRITE_WORKERS); /// <summary> /// Timer that performs the write loop /// </summary> private Timer _writeTimer; /// <summary> /// Whether or not the cache should be running /// </summary> private volatile bool _stop; public DiskWriteBackCache() { CheckCacheDir(); LoadAssetIdsFromCacheDir(); _stop = true; } public void Start() { _stop = false; _writeTimer = new Timer(this.OnWriteTimer, null, WRITE_TIMER_PERIOD, Timeout.Infinite); } public void Stop() { _stop = true; _writeTimer.Change(Timeout.Infinite, Timeout.Infinite); _writeTimer.Dispose(); _writeBackPool.WaitForIdle(SHUTDOWN_WAIT_TIMEOUT); } private void OnWriteTimer(object state) { try { while (! _stop && this.DoWriteCycle()) { } } catch (Exception e) { m_log.ErrorFormat("[InWorldz.Stratus] Error when executing asset writeback {0}", e); } finally { if (!_stop) { _writeTimer.Change(WRITE_TIMER_PERIOD, Timeout.Infinite); } } } /// <summary> /// Loads all the asset IDs into memory from the file names in the cache directory /// </summary> private void LoadAssetIdsFromCacheDir() { foreach (string fileName in Directory.EnumerateFiles(WRITEBACK_CACHE_DIR, "*.asset")) { _ids.Add(Guid.Parse(Path.GetFileNameWithoutExtension(fileName))); } } /// <summary> /// Puts an asset into the writeback cache /// </summary> /// <param name="asset"></param> public void StoreAsset(StratusAsset asset) { CheckCacheDir(); lock (_oplock) { if (_ids.Contains(asset.Id) || _recentlyWritten.ContainsKey(asset.Id)) { //we already have this asset scheduled to write throw new AssetAlreadyExistsException("Asset " + asset.Id.ToString() + " already cached for writeback"); } try { using (FileStream fstream = File.OpenWrite(GetAssetFileName(asset.Id))) { ProtoBuf.Serializer.Serialize(fstream, asset); } } catch (Exception e) { m_log.ErrorFormat("There was an error writing an asset back to disk. The process will be terminated. {0}", e); Environment.Exit(-1); } _ids.Add(asset.Id); } } /// <summary> /// Tries to read an asset from the disk /// </summary> /// <param name="assetId">The asset ID</param> /// <returns></returns> public StratusAsset GetAsset(Guid assetId) { lock (_oplock) { if (!_ids.Contains(assetId) && !_recentlyWritten.ContainsKey(assetId)) { return null; } using (FileStream fstream = File.OpenRead(GetAssetFileName(assetId))) { return ProtoBuf.Serializer.Deserialize<StratusAsset>(fstream); } } } /// <summary> /// Returns the file name and path where we can find the given asset in the cache /// </summary> /// <param name="assetId"></param> /// <returns></returns> private static string GetAssetFileName(Guid assetId) { return String.Format("{0}/{1}.asset", WRITEBACK_CACHE_DIR, assetId.ToString()); } /// <summary> /// Makes sure the cache directory exists, and creates it if not /// </summary> private void CheckCacheDir() { if (!Directory.Exists(WRITEBACK_CACHE_DIR)) { Directory.CreateDirectory(WRITEBACK_CACHE_DIR); } } /// <summary> /// Performs a single write cycle. 
Attempts to write at most NUM_WRITE_WORKERS assets at a time to CF /// </summary> internal bool DoWriteCycle() { int count = 0; lock (_oplock) { count = _ids.Count; } if (count == 0) { CheckAndCleanOldWrites(); return false; } CheckPopulateWorkers(); int num = Math.Min(NUM_WRITE_WORKERS, count); List<Guid> idsToTry = GetNextAssetsWaitingForWrite(num); //fire up the writes for (int i = 0; i < num; ++i) { int capture = i; _writeBackPool.QueueWorkItem(() => { Guid assetId = idsToTry[capture]; StratusAsset asset = this.GetAsset(assetId); try { var stream = _workers[capture].StoreAsset(asset); stream.Dispose(); MarkAssetWritten(assetId); } catch (AssetAlreadyExistsException) { //this is ok, consider this a success MarkAssetWritten(assetId); } catch (Exception e) { //asset could not be written m_log.ErrorFormat("[InWorldz.Stratus] Error when retrying write for {0}: {1}", assetId, e); } } ); } _writeBackPool.WaitForIdle(); CheckAndCleanOldWrites(); lock (_oplock) { count = _ids.Count; } return count != 0; } private List<Guid> GetNextAssetsWaitingForWrite(int num) { List<Guid> idsToTry = new List<Guid>(num); lock (_oplock) { IEnumerator<Guid> walker = _ids.GetEnumerator(); int i = 0; while (walker.MoveNext() && i < num) { idsToTry.Add(walker.Current); ++i; } } return idsToTry; } /// <summary> /// Marks the given ID as having been delay written to CF /// </summary> /// <param name="assetId"></param> private void MarkAssetWritten(Guid assetId) { //asset was written lock (_oplock) { _ids.Remove(assetId); _recentlyWritten.Add(assetId, Util.GetLongTickCount()); } } /// <summary> /// Checks to make sure we have CF workers waiting in our collection, and if not, /// creates a new set /// </summary> private void CheckPopulateWorkers() { if (_workers[0] == null) { for (int i = 0; i < NUM_WRITE_WORKERS; i++) { _workers[i] = new CloudFilesAssetWorker(CloudFilesAssetWorker.DEFAULT_READ_TIMEOUT, 70 * 1000); } } } /// <summary> /// Checks for files that were written to CF and are older than the RECENT_WRITE_TIMEOUT /// </summary> private void CheckAndCleanOldWrites() { lock (_oplock) { if (_recentlyWritten.Count == 0) return; List<Guid> needsDelete = new List<Guid>(); foreach (var kvp in _recentlyWritten) { if (Config.Settings.Instance.UnitTest_DeleteOldCacheFilesImmediately || Util.GetLongTickCount() - kvp.Value >= RECENT_WRITE_TIMEOUT) { //needs a delete needsDelete.Add(kvp.Key); } } foreach (var id in needsDelete) { RemoveAssetFile(id); _recentlyWritten.Remove(id); } } } private void RemoveAssetFile(Guid id) { try { File.Delete(GetAssetFileName(id)); } catch (Exception e) { //this isnt a huge deal, but we want to log it m_log.ErrorFormat("[InWorldz.Stratus] Unable to remove disk cached asset {0}. {1}", id, e); } } } }
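// ---------------------------------------------------------------------------
// Lifecycle sketch for DiskWriteBackCache. StratusAsset instances and the
// Cloud Files configuration are assumed to be created elsewhere (both are
// internal to this assembly), so the asset is passed in already built.
// ---------------------------------------------------------------------------
using System;

namespace InWorldz.Data.Assets.Stratus.Cache
{
    internal static class DiskWriteBackCacheUsageSketch
    {
        public static void Run(StratusAsset asset)
        {
            var cache = new DiskWriteBackCache();
            cache.Start();                  // starts the 1 second write-back timer

            // Serialized to ./cache/cf_writeback/<id>.asset; throws
            // AssetAlreadyExistsException if the id is already queued.
            cache.StoreAsset(asset);

            // Until the worker pool has pushed the asset to Cloud Files (and the
            // RECENT_WRITE_TIMEOUT cleanup has removed the file), reads are served
            // from the local disk copy.
            StratusAsset cached = cache.GetAsset(asset.Id);
            Console.WriteLine(cached != null ? "served from writeback cache" : "not cached");

            cache.Stop();                   // waits up to SHUTDOWN_WAIT_TIMEOUT for pending writes
        }
    }
}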
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.1.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Graph.RBAC { using Microsoft.Azure; using Microsoft.Azure.Graph; using Microsoft.Rest; using Microsoft.Rest.Azure; using Microsoft.Rest.Serialization; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; /// <summary> /// The Graph RBAC Management Client /// </summary> public partial class GraphRbacManagementClient : ServiceClient<GraphRbacManagementClient>, IGraphRbacManagementClient, IAzureClient { /// <summary> /// The base URI of the service. /// </summary> public System.Uri BaseUri { get; set; } /// <summary> /// Gets or sets json serialization settings. /// </summary> public JsonSerializerSettings SerializationSettings { get; private set; } /// <summary> /// Gets or sets json deserialization settings. /// </summary> public JsonSerializerSettings DeserializationSettings { get; private set; } /// <summary> /// Credentials needed for the client to connect to Azure. /// </summary> public ServiceClientCredentials Credentials { get; private set; } /// <summary> /// Client API version. /// </summary> public string ApiVersion { get; private set; } /// <summary> /// The tenant ID. /// </summary> public string TenantID { get; set; } /// <summary> /// Gets or sets the preferred language for the response. /// </summary> public string AcceptLanguage { get; set; } /// <summary> /// Gets or sets the retry timeout in seconds for Long Running Operations. /// Default value is 30. /// </summary> public int? LongRunningOperationRetryTimeout { get; set; } /// <summary> /// When set to true a unique x-ms-client-request-id value is generated and /// included in each request. Default is true. /// </summary> public bool? GenerateClientRequestId { get; set; } /// <summary> /// Gets the IObjectsOperations. /// </summary> public virtual IObjectsOperations Objects { get; private set; } /// <summary> /// Gets the IApplicationsOperations. /// </summary> public virtual IApplicationsOperations Applications { get; private set; } /// <summary> /// Gets the IGroupsOperations. /// </summary> public virtual IGroupsOperations Groups { get; private set; } /// <summary> /// Gets the IServicePrincipalsOperations. /// </summary> public virtual IServicePrincipalsOperations ServicePrincipals { get; private set; } /// <summary> /// Gets the IUsersOperations. /// </summary> public virtual IUsersOperations Users { get; private set; } /// <summary> /// Initializes a new instance of the GraphRbacManagementClient class. /// </summary> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected GraphRbacManagementClient(params DelegatingHandler[] handlers) : base(handlers) { Initialize(); } /// <summary> /// Initializes a new instance of the GraphRbacManagementClient class. /// </summary> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected GraphRbacManagementClient(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers) { Initialize(); } /// <summary> /// Initializes a new instance of the GraphRbacManagementClient class. 
/// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> protected GraphRbacManagementClient(System.Uri baseUri, params DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the GraphRbacManagementClient class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> protected GraphRbacManagementClient(System.Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the GraphRbacManagementClient class. /// </summary> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public GraphRbacManagementClient(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers) { if (credentials == null) { throw new System.ArgumentNullException("credentials"); } Credentials = credentials; if (Credentials != null) { Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the GraphRbacManagementClient class. /// </summary> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public GraphRbacManagementClient(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (credentials == null) { throw new System.ArgumentNullException("credentials"); } Credentials = credentials; if (Credentials != null) { Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the GraphRbacManagementClient class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. 
/// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public GraphRbacManagementClient(System.Uri baseUri, ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } if (credentials == null) { throw new System.ArgumentNullException("credentials"); } BaseUri = baseUri; Credentials = credentials; if (Credentials != null) { Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the GraphRbacManagementClient class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public GraphRbacManagementClient(System.Uri baseUri, ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } if (credentials == null) { throw new System.ArgumentNullException("credentials"); } BaseUri = baseUri; Credentials = credentials; if (Credentials != null) { Credentials.InitializeServiceClient(this); } } /// <summary> /// An optional partial-method to perform custom initialization. /// </summary> partial void CustomInitialize(); /// <summary> /// Initializes client properties. /// </summary> private void Initialize() { Objects = new ObjectsOperations(this); Applications = new ApplicationsOperations(this); Groups = new GroupsOperations(this); ServicePrincipals = new ServicePrincipalsOperations(this); Users = new UsersOperations(this); BaseUri = new System.Uri("https://graph.windows.net"); ApiVersion = "1.6"; AcceptLanguage = "en-US"; LongRunningOperationRetryTimeout = 30; GenerateClientRequestId = true; SerializationSettings = new JsonSerializerSettings { Formatting = Newtonsoft.Json.Formatting.Indented, DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc, NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore, ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize, ContractResolver = new ReadOnlyJsonContractResolver(), Converters = new List<JsonConverter> { new Iso8601TimeSpanConverter() } }; SerializationSettings.Converters.Add(new TransformationJsonConverter()); DeserializationSettings = new JsonSerializerSettings { DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc, NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore, ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize, ContractResolver = new ReadOnlyJsonContractResolver(), Converters = new List<JsonConverter> { new Iso8601TimeSpanConverter() } }; CustomInitialize(); DeserializationSettings.Converters.Add(new TransformationJsonConverter()); DeserializationSettings.Converters.Add(new CloudErrorJsonConverter()); } } }
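// ---------------------------------------------------------------------------
// Construction sketch for the generated client above. TokenCredentials is the
// Microsoft.Rest implementation of ServiceClientCredentials; acquiring a Graph
// access token (e.g. via ADAL/MSAL) is outside the scope of this sketch, so
// the token and tenant values are placeholders supplied by the caller.
// ---------------------------------------------------------------------------
using Microsoft.Azure.Graph.RBAC;
using Microsoft.Rest;

internal static class GraphRbacClientUsageSketch
{
    public static GraphRbacManagementClient Create(string accessToken, string tenantId)
    {
        ServiceClientCredentials credentials = new TokenCredentials(accessToken);

        // Initialize() defaults BaseUri to https://graph.windows.net and ApiVersion to "1.6";
        // only the tenant has to be supplied before using the operation groups.
        var client = new GraphRbacManagementClient(credentials)
        {
            TenantID = tenantId
        };

        // Operations are exposed as properties, e.g. client.Users, client.Groups,
        // client.ServicePrincipals; every call is scoped to TenantID.
        return client;
    }
}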
///<summary> ///A red-black tree must satisfy these properties: /// ///1. The root is black. ///2. All leaves are black. ///3. Red nodes can only have black children. ///4. All paths from a node to its leaves contain the same number of black nodes. ///</summary> using System.Collections; using System.Text; using System; using System.Reflection; using System.Text.RegularExpressions; using Alachisoft.NCache.Common.Util; namespace Alachisoft.NCache.Common.DataStructures { public class RedBlack : ISizableIndex { //REGEX is the comparison based on the regular expression. //It is used for LIKE type comparisons. //IREGEX is the inverse comparison based on the regular expression. //It is used for NOT LIKE type of comparisons. public enum COMPARE { EQ, NE, LT, GT, LTEQ, GTEQ, REGEX, IREGEX } // the number of nodes contained in the tree private int intCount; // a simple randomized hash code. The hash code could be used as a key // if it is "unique" enough. Note: The IComparable interface would need to // be replaced with int. // identifies the owner of the tree // the tree private RedBlackNode rbTree; // sentinelNode is convenient way of indicating a leaf node. private RedBlackNode _sentinelNode = new RedBlackNode(); // the node that was last found; used to optimize searches private RedBlackNode lastNodeFound; // Tree Type private AttributeTypeSize _typeSize; // Tree Node Keys Size private long _rbNodeKeySize; // Tree Data Size private long _rbNodeDataSize; //whether a duplicate key. //used for logging string _cacheName; public RedBlack() { // set up the sentinel node. the sentinel node is the key to a successfull // implementation and for understanding the red-black tree properties. _sentinelNode.Left = _sentinelNode.Right = _sentinelNode; _sentinelNode.Parent = null; _sentinelNode.Color = RedBlackNode.BLACK; rbTree = _sentinelNode; lastNodeFound = _sentinelNode; } public RedBlack(string cacheName, AttributeTypeSize size) : this() { _cacheName = cacheName; _typeSize = size; } public RedBlackNode SentinelNode { get { return _sentinelNode; } } ///<summary> /// Add /// args: ByVal key As IComparable, ByVal data As Object /// key is object that implements IComparable interface /// performance tip: change to use use int type (such as the hashcode) ///</summary> public object Add(IComparable key, object data) { bool collision = false; RedBlackNodeReference keyNodeRfrnce = null; try { if (key == null || data == null) throw (new RedBlackException("RedBlackNode key and data must not be null")); // traverse tree - find where node belongs int result = 0; // create new node RedBlackNode node = new RedBlackNode(); RedBlackNode temp = rbTree; // grab the rbTree node of the tree while (temp != _sentinelNode) { // find Parent node.Parent = temp; if(key is string) result = key.ToString().ToLower().CompareTo(temp.Key.ToString().ToLower()); else result = key.CompareTo(temp.Key); if (result == 0) { collision = true; //data with the same key. 
break; } if (result > 0) { temp = temp.Right; collision = false; } else { temp = temp.Left; collision = false; } } if (collision) { long prevSize = temp.IndexInMemorySize; temp.Insert(data, null);//.Data[data] = null; keyNodeRfrnce = temp.RBNodeReference; _rbNodeDataSize += temp.IndexInMemorySize - prevSize; } else { // setup node node.Key = key; node.Insert(data, null);//.Data.Add(data, null); node.Left = _sentinelNode; node.Right = _sentinelNode; if (_typeSize != AttributeTypeSize.Variable) _rbNodeKeySize += MemoryUtil.GetTypeSize(_typeSize); else _rbNodeKeySize += MemoryUtil.GetStringSize(key); _rbNodeDataSize += node.IndexInMemorySize; // insert node into tree starting at parent's location if (node.Parent != null) { if (key is string) result = node.Key.ToString().ToLower().CompareTo(node.Parent.Key.ToString().ToLower()); else result = node.Key.CompareTo(node.Parent.Key); if (result > 0) node.Parent.Right = node; else node.Parent.Left = node; } else rbTree = node; // first node added RestoreAfterInsert(node); // restore red-black properities lastNodeFound = node; intCount = intCount + 1; keyNodeRfrnce= node.RBNodeReference; } } catch (Exception ex) { } return keyNodeRfrnce; } ///<summary> /// RestoreAfterInsert /// Additions to red-black trees usually destroy the red-black /// properties. Examine the tree and restore. Rotations are normally /// required to restore it ///</summary> private void RestoreAfterInsert(RedBlackNode x) { // x and y are used as variable names for brevity, in a more formal // implementation, you should probably change the names RedBlackNode y; // maintain red-black tree properties after adding x while(x != rbTree && x.Parent.Color == RedBlackNode.RED) { // Parent node is .Colored red; if(x.Parent == x.Parent.Parent.Left) // determine traversal path { // is it on the Left or Right subtree? y = x.Parent.Parent.Right; // get uncle if(y!= null && y.Color == RedBlackNode.RED) { // uncle is red; change x's Parent and uncle to black x.Parent.Color = RedBlackNode.BLACK; y.Color = RedBlackNode.BLACK; // grandparent must be red. Why? Every red node that is not // a leaf has only black children x.Parent.Parent.Color = RedBlackNode.RED; x = x.Parent.Parent; // continue loop with grandparent } else { // uncle is black; determine if x is greater than Parent if(x == x.Parent.Right) { // yes, x is greater than Parent; rotate Left // make x a Left child x = x.Parent; RotateLeft(x); } // no, x is less than Parent x.Parent.Color = RedBlackNode.BLACK; // make Parent black x.Parent.Parent.Color = RedBlackNode.RED; // make grandparent black RotateRight(x.Parent.Parent); // rotate right } } else { // x's Parent is on the Right subtree // this code is the same as above with "Left" and "Right" swapped y = x.Parent.Parent.Left; if(y!= null && y.Color == RedBlackNode.RED) { x.Parent.Color = RedBlackNode.BLACK; y.Color = RedBlackNode.BLACK; x.Parent.Parent.Color = RedBlackNode.RED; x = x.Parent.Parent; } else { if(x == x.Parent.Left) { x = x.Parent; RotateRight(x); } x.Parent.Color = RedBlackNode.BLACK; x.Parent.Parent.Color = RedBlackNode.RED; RotateLeft(x.Parent.Parent); } } } rbTree.Color = RedBlackNode.BLACK; // rbTree should always be black } ///<summary> /// RotateLeft /// Rebalance the tree by rotating the nodes to the left ///</summary> public void RotateLeft(RedBlackNode x) { // pushing node x down and to the Left to balance the tree. x's Right child (y) // replaces x (since y > x), and y's Left child becomes x's Right child // (since it's < y but > x). 
RedBlackNode y = x.Right; // get x's Right node, this becomes y // set x's Right link x.Right = y.Left; // y's Left child's becomes x's Right child // modify parents if(y.Left != _sentinelNode) y.Left.Parent = x; // sets y's Left Parent to x if(y != _sentinelNode) y.Parent = x.Parent; // set y's Parent to x's Parent if(x.Parent != null) { // determine which side of it's Parent x was on if(x == x.Parent.Left) x.Parent.Left = y; // set Left Parent to y else x.Parent.Right = y; // set Right Parent to y } else rbTree = y; // at rbTree, set it to y // link x and y y.Left = x; // put x on y's Left if(x != _sentinelNode) // set y as x's Parent x.Parent = y; } ///<summary> /// RotateRight /// Rebalance the tree by rotating the nodes to the right ///</summary> public void RotateRight(RedBlackNode x) { // pushing node x down and to the Right to balance the tree. x's Left child (y) // replaces x (since x < y), and y's Right child becomes x's Left child // (since it's < x but > y). RedBlackNode y = x.Left; // get x's Left node, this becomes y // set x's Right link x.Left = y.Right; // y's Right child becomes x's Left child // modify parents if(y.Right != _sentinelNode) y.Right.Parent = x; // sets y's Right Parent to x if(y != _sentinelNode) y.Parent = x.Parent; // set y's Parent to x's Parent if(x.Parent != null) // null=rbTree, could also have used rbTree { // determine which side of it's Parent x was on if(x == x.Parent.Right) x.Parent.Right = y; // set Right Parent to y else x.Parent.Left = y; // set Left Parent to y } else rbTree = y; // at rbTree, set it to y // link x and y y.Right = x; // put x on y's Right if(x != _sentinelNode) // set y as x's Parent x.Parent = y; } ///<summary> /// GetData /// Gets the data object associated with the specified key ///<summary> public object GetData(IComparable key, COMPARE compareType) { int result; ArrayList keyList = new ArrayList(); RedBlackNode treeNode = rbTree; // begin at root IDictionaryEnumerator en = this.GetEnumerator(); string pattern; WildcardEnabledRegex regex; Hashtable finalTable = null; Hashtable skippedKeys = null; bool isStringValue = false; if (key is string) isStringValue = true; switch(compareType) { case COMPARE.EQ: // traverse tree until node is found while (treeNode != _sentinelNode) { if (isStringValue && treeNode.Key is string) result = treeNode.Key.ToString().ToLower().CompareTo(key.ToString().ToLower()); else result = treeNode.Key.CompareTo(key); if(result == 0) { lastNodeFound = treeNode; keyList.AddRange(treeNode.Data.Keys); //return treeNode.Data; return keyList; } if(result > 0) //treenode is Greater then the one we are looking. Move to Left branch treeNode = treeNode.Left; else treeNode = treeNode.Right; //treenode is Less then the one we are looking. Move to Right branch. 
} break; case COMPARE.NE: // traverse tree until node is found finalTable = new Hashtable(); while (en.MoveNext()) { if (isStringValue && en.Key is string) result = ((IComparable)en.Key).ToString().ToLower().CompareTo(key.ToString().ToLower()); else result = ((IComparable)en.Key).CompareTo(key); if (result != 0) { Hashtable tmp = en.Value as Hashtable; IDictionaryEnumerator ide = tmp.GetEnumerator(); while (ide.MoveNext()) finalTable[ide.Key] = ide.Value; } } return new ArrayList(finalTable.Keys);//keyList; break; case COMPARE.GT: finalTable = new Hashtable(); while (en.MoveNext()) { if (isStringValue && en.Key is string) result = ((IComparable)en.Key).ToString().ToLower().CompareTo(key.ToString().ToLower()); else result = ((IComparable)en.Key).CompareTo(key); if (result > 0) { Hashtable tmp = en.Value as Hashtable; IDictionaryEnumerator ide = tmp.GetEnumerator(); while (ide.MoveNext()) finalTable[ide.Key] = ide.Value; } } return new ArrayList(finalTable.Keys);//keyList; break; case COMPARE.LT: finalTable = new Hashtable(); while (en.MoveNext()) { if (isStringValue && en.Key is string) result = ((IComparable)en.Key).ToString().ToLower().CompareTo(key.ToString().ToLower()); else result = ((IComparable)en.Key).CompareTo(key); if (result < 0) { Hashtable tmp = en.Value as Hashtable; IDictionaryEnumerator ide = tmp.GetEnumerator(); while (ide.MoveNext()) finalTable[ide.Key] = ide.Value; } } return new ArrayList(finalTable.Keys);//keyList; break; case COMPARE.GTEQ: finalTable = new Hashtable(); while (en.MoveNext()) { if (isStringValue && en.Key is string) result = ((IComparable)en.Key).ToString().ToLower().CompareTo(key.ToString().ToLower()); else result = ((IComparable)en.Key).CompareTo(key); if (result >= 0) { Hashtable tmp = en.Value as Hashtable; IDictionaryEnumerator ide = tmp.GetEnumerator(); while (ide.MoveNext()) finalTable[ide.Key] = ide.Value; } } return new ArrayList(finalTable.Keys);//keyList; break; case COMPARE.LTEQ: finalTable = new Hashtable(); while (en.MoveNext()) { if (isStringValue && en.Key is string) result = ((IComparable)en.Key).ToString().ToLower().CompareTo(key.ToString().ToLower()); else result = ((IComparable)en.Key).CompareTo(key); if (result <= 0) { Hashtable tmp = en.Value as Hashtable; IDictionaryEnumerator ide = tmp.GetEnumerator(); while (ide.MoveNext()) finalTable[ide.Key] = ide.Value; } else break; } return new ArrayList(finalTable.Keys);//keyList; break; case COMPARE.REGEX: finalTable = new Hashtable(); pattern = key as string; regex = new WildcardEnabledRegex(pattern); while (en.MoveNext()) { if (en.Key is string) { if (regex.IsMatch((string)en.Key.ToString().ToLower())) { Hashtable tmp = en.Value as Hashtable; IDictionaryEnumerator ide = tmp.GetEnumerator(); while (ide.MoveNext()) finalTable[ide.Key] = ide.Value; } } } return new ArrayList(finalTable.Keys);//keyList; break; case COMPARE.IREGEX: finalTable = new Hashtable(); pattern = key as string; regex = new WildcardEnabledRegex(pattern); skippedKeys = new Hashtable(); while (en.MoveNext()) { if (en.Key is string) { if (regex.IsMatch((string)en.Key.ToString().ToLower())) { Hashtable tmp = en.Value as Hashtable; IDictionaryEnumerator ide = tmp.GetEnumerator(); while (ide.MoveNext()) { skippedKeys[ide.Key] = ide.Value; } } else { Hashtable tmp = en.Value as Hashtable; IDictionaryEnumerator ide = tmp.GetEnumerator(); while (ide.MoveNext()) { finalTable[ide.Key] = ide.Value; } } } } ArrayList list = new ArrayList(finalTable.Keys);// keyList; for (int idx = list.Count - 1; idx >= 0; idx--) { if 
(skippedKeys.ContainsKey(list[idx])) { list.RemoveAt(idx); } } return list; break; } return keyList; } ///<summary> /// return true if a specifeid key exists ///<summary> public bool Contains(IComparable key) { int result; RedBlackNode treeNode = rbTree; // begin at root // traverse tree until node is found while(treeNode != _sentinelNode) { result = treeNode.Key.CompareTo(key); if(result == 0) { lastNodeFound = treeNode; return true; } if (result > 0) //treenode is Greater then the one we are looking. Move to Left branch treeNode = treeNode.Left; else treeNode = treeNode.Right; //treenode is Less then the one we are looking. Move to Right branch. } return false; } ///<summary> /// GetMinKey /// Returns the minimum key value ///<summary> public IComparable MinKey { get { RedBlackNode treeNode = rbTree; if(treeNode == null || treeNode == _sentinelNode) return null; // traverse to the extreme left to find the smallest key while(treeNode.Left != _sentinelNode) treeNode = treeNode.Left; lastNodeFound = treeNode; return treeNode.Key; } } ///<summary> /// GetMaxKey /// Returns the maximum key value ///<summary> public IComparable MaxKey { get { RedBlackNode treeNode = rbTree; if(treeNode == null || treeNode == _sentinelNode) throw(new RedBlackException("RedBlack tree is empty")); // traverse to the extreme right to find the largest key while(treeNode.Right != _sentinelNode) treeNode = treeNode.Right; lastNodeFound = treeNode; return treeNode.Key; } } ///<summary> /// GetEnumerator /// return an enumerator that returns the tree nodes in order ///<summary> public RedBlackEnumerator GetEnumerator() { // elements is simply a generic name to refer to the // data objects the nodes contain return Elements(true); } ///<summary> /// Keys /// if(ascending is true, the keys will be returned in ascending order, else /// the keys will be returned in descending order. ///<summary> public RedBlackEnumerator Keys() { return Keys(true); } public RedBlackEnumerator Keys(bool ascending) { return new RedBlackEnumerator(rbTree, ascending, _sentinelNode); } ///<summary> /// Elements /// Returns an enumeration of the data objects. /// if(ascending is true, the objects will be returned in ascending order, /// else the objects will be returned in descending order. ///<summary> public RedBlackEnumerator Elements() { return Elements(true); } public RedBlackEnumerator Elements(bool ascending) { return new RedBlackEnumerator(rbTree, ascending, _sentinelNode); } ///<summary> /// IsEmpty /// Is the tree empty? 
///<summary> public bool IsEmpty { get { return (rbTree == null); } } public void Remove(object indexKey) { Remove(indexKey, null); } ///<summary> /// Remove /// removes the key and data object (delete) ///<summary> public bool Remove(object cacheKey, object node) { bool isNodeRemoved = false; RedBlackNodeReference keyNodeReference = (RedBlackNodeReference)node; RedBlackNode keyNode = keyNodeReference.RBReference; try { if (cacheKey != null && keyNode.Data.Count > 1) { if (keyNode.Data.Contains(cacheKey)) { keyNode.Data.Remove(cacheKey); isNodeRemoved = false; } } else { if (_typeSize != AttributeTypeSize.Variable) _rbNodeKeySize -= MemoryUtil.GetTypeSize(_typeSize); else _rbNodeKeySize -= MemoryUtil.GetStringSize(keyNode.Key); _rbNodeDataSize -= keyNode.IndexInMemorySize; Delete(keyNode); isNodeRemoved = true; } } catch(Exception) { throw; } if(isNodeRemoved) intCount = intCount - 1; return isNodeRemoved; } public void Remove(IComparable indexKey, object cacheKey) { bool isNodeRemoved = false; if (indexKey == null) throw (new RedBlackException("RedBlackNode key is null")); try { // find node int result; RedBlackNode node; // see if node to be deleted was the last one found if(indexKey is string) result = indexKey.ToString().ToLower().CompareTo(lastNodeFound.Key.ToString().ToLower()); else result = indexKey.CompareTo(lastNodeFound.Key); if (result == 0) node = lastNodeFound; else { // not found, must search node = rbTree; while (node != _sentinelNode) { if (indexKey is string) result = indexKey.ToString().ToLower().CompareTo(node.Key.ToString().ToLower()); else result = indexKey.CompareTo(node.Key); if (result == 0) break; if (result < 0) node = node.Left; else node = node.Right; } if (node == _sentinelNode) { return; // key not found } } try { if (cacheKey != null && node.Data.Count > 1) { if (node.Data.Contains(cacheKey)) { node.Data.Remove(cacheKey); isNodeRemoved = false; } } else { if (_typeSize != AttributeTypeSize.Variable) _rbNodeKeySize -= MemoryUtil.GetTypeSize(_typeSize); else _rbNodeKeySize -= MemoryUtil.GetStringSize(node.Key); _rbNodeDataSize -= node.IndexInMemorySize; Delete(node); isNodeRemoved = true; } } catch (Exception) { return; } } catch (Exception) { throw; } if (isNodeRemoved) intCount = intCount - 1; } ///<summary> /// Delete /// Delete a node from the tree and restore red black properties ///<summary> private void Delete(RedBlackNode z) { // A node to be deleted will be: // 1. a leaf with no children // 2. have one child // 3. have two children // If the deleted node is red, the red black properties still hold. // If the deleted node is black, the tree needs rebalancing RedBlackNode x = new RedBlackNode(); // work node to contain the replacement node RedBlackNode y; // work node // find the replacement node (the successor to x) - the node one with // at *most* one child. if(z.Left == _sentinelNode || z.Right == _sentinelNode) y = z; // node has sentinel as a child else { // z has two children, find replacement node which will // be the leftmost node greater than z y = z.Right; // traverse right subtree while(y.Left != _sentinelNode) // to find next node in sequence y = y.Left; } // at this point, y contains the replacement node. it's content will be copied // to the valules in the node to be deleted // x (y's only child) is the node that will be linked to y's old parent. 
if(y.Left != _sentinelNode) x = y.Left; else x = y.Right; // replace x's parent with y's parent and // link x to proper subtree in parent // this removes y from the chain x.Parent = y.Parent; if(y.Parent != null) if(y == y.Parent.Left) y.Parent.Left = x; else y.Parent.Right = x; else rbTree = x; // make x the root node // copy the values from y (the replacement node) to the node being deleted. // note: this effectively deletes the node. if(y != z) { z.Key = y.Key; z.Data = y.Data; //un-commented by [Asif Imam] 12 Jun,08 z.RBNodeReference = y.RBNodeReference; z.RBNodeReference.RBReference = z; } if(y.Color == RedBlackNode.BLACK) RestoreAfterDelete(x); lastNodeFound = _sentinelNode; } ///<summary> /// RestoreAfterDelete /// Deletions from red-black trees may destroy the red-black /// properties. Examine the tree and restore. Rotations are normally /// required to restore it ///</summary> private void RestoreAfterDelete(RedBlackNode x) { // maintain Red-Black tree balance after deleting node RedBlackNode y; while(x != rbTree && x.Color == RedBlackNode.BLACK) { if(x == x.Parent.Left) // determine sub tree from parent { y = x.Parent.Right; // y is x's sibling if(y.Color == RedBlackNode.RED) { // x is black, y is red - make both black and rotate y.Color = RedBlackNode.BLACK; x.Parent.Color = RedBlackNode.RED; RotateLeft(x.Parent); y = x.Parent.Right; } if(y.Left.Color == RedBlackNode.BLACK && y.Right.Color == RedBlackNode.BLACK) { // children are both black y.Color = RedBlackNode.RED; // change parent to red x = x.Parent; // move up the tree } else { if(y.Right.Color == RedBlackNode.BLACK) { y.Left.Color = RedBlackNode.BLACK; y.Color = RedBlackNode.RED; RotateRight(y); y = x.Parent.Right; } y.Color = x.Parent.Color; x.Parent.Color = RedBlackNode.BLACK; y.Right.Color = RedBlackNode.BLACK; RotateLeft(x.Parent); x = rbTree; } } else { // right subtree - same as code above with right and left swapped y = x.Parent.Left; if(y.Color == RedBlackNode.RED) { y.Color = RedBlackNode.BLACK; x.Parent.Color = RedBlackNode.RED; RotateRight (x.Parent); y = x.Parent.Left; } if(y.Right.Color == RedBlackNode.BLACK && y.Left.Color == RedBlackNode.BLACK) { y.Color = RedBlackNode.RED; x = x.Parent; } else { if(y.Left.Color == RedBlackNode.BLACK) { y.Right.Color = RedBlackNode.BLACK; y.Color = RedBlackNode.RED; RotateLeft(y); y = x.Parent.Left; } y.Color = x.Parent.Color; x.Parent.Color = RedBlackNode.BLACK; y.Left.Color = RedBlackNode.BLACK; RotateRight(x.Parent); x = rbTree; } } } x.Color = RedBlackNode.BLACK; } ///<summary> /// RemoveMin /// removes the node with the minimum key ///<summary> public void RemoveMin() { if(rbTree == null) throw(new RedBlackException("RedBlackNode is null")); Remove(MinKey); } ///<summary> /// RemoveMax /// removes the node with the maximum key ///<summary> public void RemoveMax() { if(rbTree == null) throw(new RedBlackException("RedBlackNode is null")); Remove(MaxKey); } ///<summary> /// Clear /// Empties or clears the tree ///<summary> public void Clear () { rbTree = _sentinelNode; intCount = 0; _rbNodeDataSize = 0; _rbNodeKeySize = 0; } ///<summary> /// Size /// returns the size (number of nodes) in the tree ///<summary> public int Count { // number of keys get { return intCount; } } ///<summary> /// Equals ///<summary> public override bool Equals(object obj) { if(obj == null) return false; if(!(obj is RedBlackNode )) return false; if(this == obj) return true; return (ToString().Equals(((RedBlackNode)(obj)).ToString())); } ///<summary> /// HashCode ///<summary> public override int 
GetHashCode() { return 0; } ///<summary> /// ToString ///<summary> public override string ToString() { //return strIdentifier.ToString(); return ""; } public long IndexInMemorySize { get { return _rbNodeKeySize + _rbNodeDataSize; } } } }
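// ---------------------------------------------------------------------------
// Usage sketch for the index tree above: the tree maps an attribute value (the
// IComparable key) to the set of cache keys stored under it. Add returns a
// RedBlackNodeReference handle (typed object here) that can be passed back to
// the Remove(object, object) overload; the key and value strings are
// placeholders.
// ---------------------------------------------------------------------------
using System;
using System.Collections;

namespace Alachisoft.NCache.Common.DataStructures
{
    internal static class RedBlackUsageSketch
    {
        public static void Run()
        {
            var index = new RedBlack();

            // Index three cache keys under two attribute values.
            object londonHandle = index.Add("london", "customer:1");   // handle usable with Remove(object, object)
            index.Add("london", "customer:2");                         // collides with the first node
            index.Add("paris", "customer:3");

            // Equality lookups are case-insensitive for string keys.
            ArrayList londonKeys = (ArrayList)index.GetData("LONDON", RedBlack.COMPARE.EQ);
            Console.WriteLine("london -> {0} cache keys", londonKeys.Count);              // 2

            // Other comparison types walk the whole tree through the enumerator.
            ArrayList notParis = (ArrayList)index.GetData("paris", RedBlack.COMPARE.NE);
            Console.WriteLine("not paris -> {0} cache keys", notParis.Count);             // 2

            // Removing one cache key leaves the node in place while others remain.
            index.Remove((IComparable)"london", "customer:1");
            Console.WriteLine("still contains 'london': {0}", index.Contains("london"));  // True
        }
    }
}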
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. //////////////////////////////////////////////////////////////////////////// // // Class: TextInfo // // Purpose: This Class defines behaviors specific to a writing system. // A writing system is the collection of scripts and // orthographic rules required to represent a language as text. // // Date: March 31, 1999 // //////////////////////////////////////////////////////////////////////////// using System.Security; using System; using System.Text; using System.Threading; using System.Runtime; using System.Diagnostics.Contracts; using System.Runtime.InteropServices; using System.Runtime.CompilerServices; namespace System.Globalization { public partial class TextInfo { ////--------------------------------------------------------------------// //// Internal Information // ////--------------------------------------------------------------------// //// //// Variables. //// private String m_listSeparator; private bool m_isReadOnly = false; //// //// In Whidbey we had several names: //// m_win32LangID is the name of the culture, but only used for (de)serialization. //// customCultureName is the name of the creating custom culture (if custom) In combination with m_win32LangID //// this is authoratative, ie when deserializing. //// m_cultureTableRecord was the data record of the creating culture. (could have different name if custom) //// m_textInfoID is the LCID of the textinfo itself (no longer used) //// m_name is the culture name (from cultureinfo.name) //// //// In Silverlight/Arrowhead this is slightly different: //// m_cultureName is the name of the creating culture. Note that we consider this authoratative, //// if the culture's textinfo changes when deserializing, then behavior may change. //// (ala Whidbey behavior). This is the only string Arrowhead needs to serialize. //// m_cultureData is the data that backs this class. //// m_textInfoName is the actual name of the textInfo (from cultureData.STEXTINFO) //// this can be the same as m_cultureName on Silverlight since the OS knows //// how to do the sorting. However in the desktop, when we call the sorting dll, it doesn't //// know how to resolve custom locle names to sort ids so we have to have alredy resolved this. //// private readonly String m_cultureName; // Name of the culture that created this text info private readonly CultureData m_cultureData; // Data record for the culture that made us, not for this textinfo private readonly String m_textInfoName; // Name of the text info we're using (ie: m_cultureData.STEXTINFO) private bool? 
m_IsAsciiCasingSameAsInvariant; // Invariant text info internal static TextInfo Invariant { get { if (s_Invariant == null) s_Invariant = new TextInfo(CultureData.Invariant); return s_Invariant; } } internal volatile static TextInfo s_Invariant; // // Internal ordinal comparison functions // internal static int GetHashCodeOrdinalIgnoreCase(String s) { // This is the same as an case insensitive hash for Invariant // (not necessarily true for sorting, but OK for casing & then we apply normal hash code rules) return (Invariant.GetCaseInsensitiveHashCode(s)); } // Currently we don't have native functions to do this, so we do it the hard way internal static int IndexOfStringOrdinalIgnoreCase(String source, String value, int startIndex, int count) { if (count > source.Length || count < 0 || startIndex < 0 || startIndex >= source.Length || startIndex + count > source.Length) { return -1; } return CompareInfo.IndexOfOrdinal(source, value, startIndex, count, ignoreCase: true); } // Currently we don't have native functions to do this, so we do it the hard way internal static int LastIndexOfStringOrdinalIgnoreCase(String source, String value, int startIndex, int count) { if (count > source.Length || count < 0 || startIndex < 0 || startIndex > source.Length - 1 || (startIndex - count + 1 < 0)) { return -1; } return CompareInfo.LastIndexOfOrdinal(source, value, startIndex, count, ignoreCase: true); } ////////////////////////////////////////////////////////////////////////// //// //// CultureName //// //// The name of the culture associated with the current TextInfo. //// ////////////////////////////////////////////////////////////////////////// public string CultureName { get { return m_textInfoName; } } //////////////////////////////////////////////////////////////////////// // // IsReadOnly // // Detect if the object is readonly. // //////////////////////////////////////////////////////////////////////// [System.Runtime.InteropServices.ComVisible(false)] public bool IsReadOnly { get { return (m_isReadOnly); } } ////////////////////////////////////////////////////////////////////////// //// //// Clone //// //// Is the implementation of IColnable. //// ////////////////////////////////////////////////////////////////////////// internal virtual Object Clone() { object o = MemberwiseClone(); ((TextInfo)o).SetReadOnlyState(false); return (o); } //////////////////////////////////////////////////////////////////////// // // ReadOnly // // Create a cloned readonly instance or return the input one if it is // readonly. // //////////////////////////////////////////////////////////////////////// [System.Runtime.InteropServices.ComVisible(false)] internal static TextInfo ReadOnly(TextInfo textInfo) { if (textInfo == null) { throw new ArgumentNullException("textInfo"); } Contract.EndContractBlock(); if (textInfo.IsReadOnly) { return (textInfo); } TextInfo clonedTextInfo = (TextInfo)(textInfo.MemberwiseClone()); clonedTextInfo.SetReadOnlyState(true); return (clonedTextInfo); } private void VerifyWritable() { if (m_isReadOnly) { throw new InvalidOperationException(SR.InvalidOperation_ReadOnly); } } internal void SetReadOnlyState(bool readOnly) { m_isReadOnly = readOnly; } //////////////////////////////////////////////////////////////////////// // // ListSeparator // // Returns the string used to separate items in a list. 
// //////////////////////////////////////////////////////////////////////// public virtual String ListSeparator { get { if (m_listSeparator == null) { m_listSeparator = this.m_cultureData.SLIST; } return (m_listSeparator); } set { if (value == null) { throw new ArgumentNullException("value", SR.ArgumentNull_String); } VerifyWritable(); m_listSeparator = value; } } //////////////////////////////////////////////////////////////////////// // // ToLower // // Converts the character or string to lower case. Certain locales // have different casing semantics from the file systems in Win32. // //////////////////////////////////////////////////////////////////////// public unsafe virtual char ToLower(char c) { if (IsAscii(c) && IsAsciiCasingSameAsInvariant) { return ToLowerAsciiInvariant(c); } return (ChangeCase(c, toUpper: false)); } public unsafe virtual String ToLower(String str) { if (str == null) { throw new ArgumentNullException("str"); } return ChangeCase(str, toUpper: false); } static private Char ToLowerAsciiInvariant(Char c) { if ('A' <= c && c <= 'Z') { c = (Char)(c | 0x20); } return c; } //////////////////////////////////////////////////////////////////////// // // ToUpper // // Converts the character or string to upper case. Certain locales // have different casing semantics from the file systems in Win32. // //////////////////////////////////////////////////////////////////////// public unsafe virtual char ToUpper(char c) { if (IsAscii(c) && IsAsciiCasingSameAsInvariant) { return ToUpperAsciiInvariant(c); } return (ChangeCase(c, toUpper: true)); } public unsafe virtual String ToUpper(String str) { if (str == null) { throw new ArgumentNullException("str"); } return ChangeCase(str, toUpper: true); } static private Char ToUpperAsciiInvariant(Char c) { if ('a' <= c && c <= 'z') { c = (Char)(c & ~0x20); } return c; } static private bool IsAscii(Char c) { return c < 0x80; } private bool IsAsciiCasingSameAsInvariant { get { #if PLATFORM_UNIX // UNIXTODO: This hack can be removed once collation works and the code after this correctly returns "false". if (m_needsTurkishCasing) { return false; } #endif if (m_IsAsciiCasingSameAsInvariant == null) { m_IsAsciiCasingSameAsInvariant = CultureInfo.GetCultureInfo(m_textInfoName).CompareInfo.Compare("abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOPQRSTUVWXYZ", CompareOptions.IgnoreCase) == 0; } return (bool)m_IsAsciiCasingSameAsInvariant; } } // IsRightToLeft // // Returns true if the dominant direction of text and UI such as the relative position of buttons and scroll bars // public bool IsRightToLeft { get { return this.m_cultureData.IsRightToLeft; } } //////////////////////////////////////////////////////////////////////// // // Equals // // Implements Object.Equals(). Returns a boolean indicating whether // or not object refers to the same CultureInfo as the current instance. // //////////////////////////////////////////////////////////////////////// public override bool Equals(Object obj) { TextInfo that = obj as TextInfo; if (that != null) { return this.CultureName.Equals(that.CultureName); } return (false); } //////////////////////////////////////////////////////////////////////// // // GetHashCode // // Implements Object.GetHashCode(). Returns the hash code for the // CultureInfo. The hash code is guaranteed to be the same for CultureInfo A // and B where A.Equals(B) is true. 
// //////////////////////////////////////////////////////////////////////// public override int GetHashCode() { return (this.CultureName.GetHashCode()); } //////////////////////////////////////////////////////////////////////// // // ToString // // Implements Object.ToString(). Returns a string describing the // TextInfo. // //////////////////////////////////////////////////////////////////////// public override String ToString() { return ("TextInfo - " + this.m_cultureData.CultureName); } // // Get case-insensitive hash code for the specified string. // internal unsafe int GetCaseInsensitiveHashCode(String str) { // Validate inputs if (str == null) { throw new ArgumentNullException("str"); } // This code assumes that ASCII casing is safe for whatever context is passed in. // this is true today, because we only ever call these methods on Invariant. It would be ideal to refactor // these methods so they were correct by construction and we could only ever use Invariant. uint hash = 5381; uint c; // Note: We assume that str contains only ASCII characters until // we hit a non-ASCII character to optimize the common case. for (int i = 0; i < str.Length; i++) { c = str[i]; if (c >= 0x80) { return GetCaseInsensitiveHashCodeSlow(str); } // If we have a lowercase character, ANDing off 0x20 // will make it an uppercase character. if ((c - 'a') <= ('z' - 'a')) { c = (uint)((int)c & ~0x20); } hash = ((hash << 5) + hash) ^ c; } return (int)hash; } private unsafe int GetCaseInsensitiveHashCodeSlow(String str) { Contract.Assert(str != null); string upper = ToUpper(str); uint hash = 5381; uint c; for (int i = 0; i < upper.Length; i++) { c = upper[i]; hash = ((hash << 5) + hash) ^ c; } return (int)hash; } } }
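// --- Illustration sketch (not part of the original file) --------------------
// A standalone re-implementation of the case-insensitive hash shown above: a
// djb2-style hash (5381, "hash * 33 XOR c") over upper-cased characters, with
// an ASCII fast path and a slow path that upper-cases the whole string. It
// mirrors GetCaseInsensitiveHashCode/GetCaseInsensitiveHashCodeSlow for
// explanation only and is not the framework API; invariant casing is assumed.
using System;
using System.Globalization;

static class CaseInsensitiveHashSketch
{
    public static int Hash(string s)
    {
        if (s == null) throw new ArgumentNullException(nameof(s));

        uint hash = 5381;
        foreach (char ch in s)
        {
            uint c = ch;
            if (c >= 0x80)
            {
                // Non-ASCII character: fall back to upper-casing the whole
                // string first, as the slow path above does.
                return HashSlow(s);
            }

            // ASCII lower-case letter: clear bit 0x20 to map it to upper case.
            if ((c - 'a') <= ('z' - 'a'))
            {
                c = (uint)((int)c & ~0x20);
            }

            hash = ((hash << 5) + hash) ^ c;    // hash * 33 XOR c
        }
        return (int)hash;
    }

    private static int HashSlow(string s)
    {
        string upper = CultureInfo.InvariantCulture.TextInfo.ToUpper(s);
        uint hash = 5381;
        foreach (char ch in upper)
        {
            hash = ((hash << 5) + hash) ^ ch;
        }
        return (int)hash;
    }
}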
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /*============================================================ ** ** ** ** Purpose: Managed ACL wrapper for Win32 events. ** ** ===========================================================*/ using System; using System.Collections; using System.Security.Principal; using Microsoft.Win32; using Microsoft.Win32.SafeHandles; using System.Runtime.InteropServices; using System.Threading; namespace System.Security.AccessControl { // Derive this list of values from winnt.h and MSDN docs: // http://msdn.microsoft.com/library/default.asp?url=/library/en-us/dllproc/base/synchronization_object_security_and_access_rights.asp // Win32's interesting values are EVENT_MODIFY_STATE (0x0002) and // EVENT_ALL_ACCESS (0x1F0003). I don't know what 0x1 is, but Windows // includes it in EVENT_ALL_ACCESS. [Flags] public enum EventWaitHandleRights { Modify = 0x000002, Delete = 0x010000, ReadPermissions = 0x020000, ChangePermissions = 0x040000, TakeOwnership = 0x080000, Synchronize = 0x100000, // SYNCHRONIZE FullControl = 0x1F0003 } public sealed class EventWaitHandleAccessRule : AccessRule { // Constructor for creating access rules for registry objects public EventWaitHandleAccessRule(IdentityReference identity, EventWaitHandleRights eventRights, AccessControlType type) : this(identity, (int)eventRights, false, InheritanceFlags.None, PropagationFlags.None, type) { } public EventWaitHandleAccessRule(String identity, EventWaitHandleRights eventRights, AccessControlType type) : this(new NTAccount(identity), (int)eventRights, false, InheritanceFlags.None, PropagationFlags.None, type) { } // // Internal constructor to be called by public constructors // and the access rule factory methods of {File|Folder}Security // internal EventWaitHandleAccessRule( IdentityReference identity, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, AccessControlType type) : base( identity, accessMask, isInherited, inheritanceFlags, propagationFlags, type) { } public EventWaitHandleRights EventWaitHandleRights { get { return (EventWaitHandleRights)base.AccessMask; } } } public sealed class EventWaitHandleAuditRule : AuditRule { public EventWaitHandleAuditRule(IdentityReference identity, EventWaitHandleRights eventRights, AuditFlags flags) : this(identity, (int)eventRights, false, InheritanceFlags.None, PropagationFlags.None, flags) { } /* // Not in the spec public EventWaitHandleAuditRule(string identity, EventWaitHandleRights eventRights, AuditFlags flags) : this(new NTAccount(identity), (int) eventRights, false, InheritanceFlags.None, PropagationFlags.None, flags) { } */ internal EventWaitHandleAuditRule(IdentityReference identity, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, AuditFlags flags) : base(identity, accessMask, isInherited, inheritanceFlags, propagationFlags, flags) { } public EventWaitHandleRights EventWaitHandleRights { get { return (EventWaitHandleRights)base.AccessMask; } } } public sealed class EventWaitHandleSecurity : NativeObjectSecurity { public EventWaitHandleSecurity() : base(true, ResourceType.KernelObject) { } [System.Security.SecurityCritical] // auto-generated internal EventWaitHandleSecurity(String name, AccessControlSections includeSections) : base(true, ResourceType.KernelObject, name, includeSections, 
_HandleErrorCode, null) { // Let the underlying ACL API's demand unmanaged code permission. } [System.Security.SecurityCritical] // auto-generated internal EventWaitHandleSecurity(SafeWaitHandle handle, AccessControlSections includeSections) : base(true, ResourceType.KernelObject, handle, includeSections, _HandleErrorCode, null) { // Let the underlying ACL API's demand unmanaged code permission. } [System.Security.SecurityCritical] // auto-generated private static Exception _HandleErrorCode(int errorCode, string name, SafeHandle handle, object context) { System.Exception exception = null; switch (errorCode) { case Interop.mincore.Errors.ERROR_INVALID_NAME: case Interop.mincore.Errors.ERROR_INVALID_HANDLE: case Interop.mincore.Errors.ERROR_FILE_NOT_FOUND: if ((name != null) && (name.Length != 0)) exception = new WaitHandleCannotBeOpenedException(SR.Format(SR.WaitHandleCannotBeOpenedException_InvalidHandle, name)); else exception = new WaitHandleCannotBeOpenedException(); break; default: break; } return exception; } public override AccessRule AccessRuleFactory(IdentityReference identityReference, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, AccessControlType type) { return new EventWaitHandleAccessRule(identityReference, accessMask, isInherited, inheritanceFlags, propagationFlags, type); } public override AuditRule AuditRuleFactory(IdentityReference identityReference, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, AuditFlags flags) { return new EventWaitHandleAuditRule(identityReference, accessMask, isInherited, inheritanceFlags, propagationFlags, flags); } internal AccessControlSections GetAccessControlSectionsFromChanges() { AccessControlSections persistRules = AccessControlSections.None; if (AccessRulesModified) persistRules = AccessControlSections.Access; if (AuditRulesModified) persistRules |= AccessControlSections.Audit; if (OwnerModified) persistRules |= AccessControlSections.Owner; if (GroupModified) persistRules |= AccessControlSections.Group; return persistRules; } [System.Security.SecurityCritical] // auto-generated internal void Persist(SafeWaitHandle handle) { // // Let the underlying ACL API's demand unmanaged code. // WriteLock(); try { AccessControlSections persistSections = GetAccessControlSectionsFromChanges(); if (persistSections == AccessControlSections.None) return; // Don't need to persist anything. 
base.Persist(handle, persistSections); OwnerModified = GroupModified = AuditRulesModified = AccessRulesModified = false; } finally { WriteUnlock(); } } public void AddAccessRule(EventWaitHandleAccessRule rule) { base.AddAccessRule(rule); } public void SetAccessRule(EventWaitHandleAccessRule rule) { base.SetAccessRule(rule); } public void ResetAccessRule(EventWaitHandleAccessRule rule) { base.ResetAccessRule(rule); } public bool RemoveAccessRule(EventWaitHandleAccessRule rule) { return base.RemoveAccessRule(rule); } public void RemoveAccessRuleAll(EventWaitHandleAccessRule rule) { base.RemoveAccessRuleAll(rule); } public void RemoveAccessRuleSpecific(EventWaitHandleAccessRule rule) { base.RemoveAccessRuleSpecific(rule); } public void AddAuditRule(EventWaitHandleAuditRule rule) { base.AddAuditRule(rule); } public void SetAuditRule(EventWaitHandleAuditRule rule) { base.SetAuditRule(rule); } public bool RemoveAuditRule(EventWaitHandleAuditRule rule) { return base.RemoveAuditRule(rule); } public void RemoveAuditRuleAll(EventWaitHandleAuditRule rule) { base.RemoveAuditRuleAll(rule); } public void RemoveAuditRuleSpecific(EventWaitHandleAuditRule rule) { base.RemoveAuditRuleSpecific(rule); } public override Type AccessRightType { get { return typeof(EventWaitHandleRights); } } public override Type AccessRuleType { get { return typeof(EventWaitHandleAccessRule); } } public override Type AuditRuleType { get { return typeof(EventWaitHandleAuditRule); } } } }
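// --- Usage sketch (not part of the original file) ---------------------------
// A minimal example of building an EventWaitHandleSecurity with the rule types
// defined above: grant the current Windows user Synchronize + Modify, and
// audit failed attempts by Everyone to change permissions. Attaching the
// resulting descriptor to a named event (via EventWaitHandle APIs on your
// target framework) is outside this file and is not shown.
using System.Security.AccessControl;
using System.Security.Principal;

static class EventSecuritySketch
{
    public static EventWaitHandleSecurity BuildSecurity()
    {
        var security = new EventWaitHandleSecurity();

        // Allow the current user to wait on (Synchronize) and signal (Modify) the event.
        SecurityIdentifier me = WindowsIdentity.GetCurrent().User;
        security.AddAccessRule(new EventWaitHandleAccessRule(
            me,
            EventWaitHandleRights.Synchronize | EventWaitHandleRights.Modify,
            AccessControlType.Allow));

        // Audit failed attempts by Everyone to change the event's DACL.
        var everyone = new SecurityIdentifier(WellKnownSidType.WorldSid, null);
        security.AddAuditRule(new EventWaitHandleAuditRule(
            everyone,
            EventWaitHandleRights.ChangePermissions,
            AuditFlags.Failure));

        return security;
    }
}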
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; using System.Runtime.InteropServices; using System.ComponentModel; using System.Configuration; using System.DirectoryServices.Protocols; using System.DirectoryServices; using System.Net; using System.Text; using System.Threading; using System.Collections; using System.Security.Permissions; namespace System.DirectoryServices.AccountManagement { internal struct ServerProperties { public string dnsHostName; public DomainControllerMode OsVersion; public ContextType contextType; public string[] SupportCapabilities; public int portSSL; public int portLDAP; }; internal enum DomainControllerMode { Win2k = 0, Win2k3 = 2, WinLH = 3 }; static internal class CapabilityMap { public const string LDAP_CAP_ACTIVE_DIRECTORY_OID = "1.2.840.113556.1.4.800"; public const string LDAP_CAP_ACTIVE_DIRECTORY_V51_OID = "1.2.840.113556.1.4.1670"; public const string LDAP_CAP_ACTIVE_DIRECTORY_LDAP_INTEG_OID = "1.2.840.113556.1.4.1791"; public const string LDAP_CAP_ACTIVE_DIRECTORY_ADAM_OID = "1.2.840.113556.1.4.1851"; public const string LDAP_CAP_ACTIVE_DIRECTORY_PARTIAL_SECRETS_OID = "1.2.840.113556.1.4.1920"; public const string LDAP_CAP_ACTIVE_DIRECTORY_V61_OID = "1.2.840.113556.1.4.1935"; } internal sealed class CredentialValidator { private enum AuthMethod { Simple = 1, Negotiate = 2 } private bool _fastConcurrentSupported = true; private Hashtable _connCache = new Hashtable(4); private LdapDirectoryIdentifier _directoryIdent; private object _cacheLock = new object(); private AuthMethod _lastBindMethod = AuthMethod.Simple; private string _serverName; private ContextType _contextType; private ServerProperties _serverProperties; private const ContextOptions defaultContextOptionsNegotiate = ContextOptions.Signing | ContextOptions.Sealing | ContextOptions.Negotiate; private const ContextOptions defaultContextOptionsSimple = ContextOptions.SecureSocketLayer | ContextOptions.SimpleBind; public CredentialValidator(ContextType contextType, string serverName, ServerProperties serverProperties) { _fastConcurrentSupported = !(serverProperties.OsVersion == DomainControllerMode.Win2k); if (contextType == ContextType.Machine && serverName == null) { _serverName = Environment.MachineName; } else { _serverName = serverName; } _contextType = contextType; _serverProperties = serverProperties; } private bool BindSam(string target, string userName, string password) { StringBuilder adsPath = new StringBuilder(); adsPath.Append("WinNT://"); adsPath.Append(_serverName); adsPath.Append(",computer"); Guid g = new Guid("fd8256d0-fd15-11ce-abc4-02608c9e7553"); // IID_IUnknown object value = null; // always attempt secure auth.. int authenticationType = 1; object unmanagedResult = null; try { if (Thread.CurrentThread.GetApartmentState() == ApartmentState.Unknown) Thread.CurrentThread.SetApartmentState(ApartmentState.MTA); // We need the credentials to be in the form <machine>\\<user> // if they just passed user then append the machine name here. 
if (null != userName) { int index = userName.IndexOf("\\", StringComparison.Ordinal); if (index == -1) { userName = _serverName + "\\" + userName; } } int hr = UnsafeNativeMethods.ADsOpenObject(adsPath.ToString(), userName, password, (int)authenticationType, ref g, out value); if (hr != 0) { if (hr == unchecked((int)(ExceptionHelper.ERROR_HRESULT_LOGON_FAILURE))) { // This is the invalid credetials case. We want to return false // instead of throwing an exception return false; } else { throw ExceptionHelper.GetExceptionFromErrorCode(hr); } } unmanagedResult = ((UnsafeNativeMethods.IADs)value).Get("name"); } catch (System.Runtime.InteropServices.COMException e) { if (e.ErrorCode == unchecked((int)(ExceptionHelper.ERROR_HRESULT_LOGON_FAILURE))) { return false; } else { throw ExceptionHelper.GetExceptionFromCOMException(e); } } finally { if (value != null) System.Runtime.InteropServices.Marshal.ReleaseComObject(value); } return true; } private bool BindLdap(NetworkCredential creds, ContextOptions contextOptions) { LdapConnection current = null; bool useSSL = (ContextOptions.SecureSocketLayer & contextOptions) > 0; if (_contextType == ContextType.ApplicationDirectory) { _directoryIdent = new LdapDirectoryIdentifier(_serverProperties.dnsHostName, useSSL ? _serverProperties.portSSL : _serverProperties.portLDAP); } else { _directoryIdent = new LdapDirectoryIdentifier(_serverName, useSSL ? LdapConstants.LDAP_SSL_PORT : LdapConstants.LDAP_PORT); } bool attemptFastConcurrent = useSSL && _fastConcurrentSupported; int index = Convert.ToInt32(attemptFastConcurrent) * 2 + Convert.ToInt32(useSSL); if (!_connCache.Contains(index)) { lock (_cacheLock) { if (!_connCache.Contains(index)) { current = new LdapConnection(_directoryIdent); // First attempt to turn on SSL current.SessionOptions.SecureSocketLayer = useSSL; if (attemptFastConcurrent) { try { current.SessionOptions.FastConcurrentBind(); } catch (PlatformNotSupportedException) { current.Dispose(); current = null; _fastConcurrentSupported = false; index = Convert.ToInt32(useSSL); current = new LdapConnection(_directoryIdent); // We have fallen back to another connection so we need to set SSL again. current.SessionOptions.SecureSocketLayer = useSSL; } } _connCache.Add(index, current); } else { current = (LdapConnection)_connCache[index]; } } } else { current = (LdapConnection)_connCache[index]; } // If we are performing fastConcurrentBind there is no need to prevent multithreadaccess. FSB is thread safe and multi cred safe // FSB also always has the same contextoptions so there is no need to lock the code that is modifying the current connection if (attemptFastConcurrent && _fastConcurrentSupported) { lockedLdapBind(current, creds, contextOptions); } else { lock (_cacheLock) { lockedLdapBind(current, creds, contextOptions); } } return true; } private void lockedLdapBind(LdapConnection current, NetworkCredential creds, ContextOptions contextOptions) { current.AuthType = ((ContextOptions.SimpleBind & contextOptions) > 0 ? AuthType.Basic : AuthType.Negotiate); current.SessionOptions.Signing = ((ContextOptions.Signing & contextOptions) > 0 ? true : false); current.SessionOptions.Sealing = ((ContextOptions.Sealing & contextOptions) > 0 ? 
true : false); if ((null == creds.UserName) && (null == creds.Password)) { current.Bind(); } else { current.Bind(creds); } } public bool Validate(string userName, string password) { NetworkCredential networkCredential = new NetworkCredential(userName, password); // empty username and password on the local box // causes authentication to succeed. If the username is empty we should just fail it // here. if (userName != null && userName.Length == 0) return false; if (_contextType == ContextType.Domain || _contextType == ContextType.ApplicationDirectory) { try { if (_lastBindMethod == AuthMethod.Simple && (_fastConcurrentSupported || _contextType == ContextType.ApplicationDirectory)) { try { BindLdap(networkCredential, defaultContextOptionsSimple); _lastBindMethod = AuthMethod.Simple; return true; } catch (LdapException) { // we don't return false here even if we failed with ERROR_LOGON_FAILURE. We must check Negotiate // because there might be cases in which SSL fails and Negotiate succeeds } BindLdap(networkCredential, defaultContextOptionsNegotiate); _lastBindMethod = AuthMethod.Negotiate; return true; } else { try { BindLdap(networkCredential, defaultContextOptionsNegotiate); _lastBindMethod = AuthMethod.Negotiate; return true; } catch (LdapException) { // we don't return false here even if we failed with ERROR_LOGON_FAILURE. We must check SSL // because there might be cases in which Negotiate fails and SSL succeeds } BindLdap(networkCredential, defaultContextOptionsSimple); _lastBindMethod = AuthMethod.Simple; return true; } } catch (LdapException ldapex) { // If we got here it means that both SSL and Negotiate failed. Tough luck. if (ldapex.ErrorCode == ExceptionHelper.ERROR_LOGON_FAILURE) { return false; } throw; } } else { Debug.Assert(_contextType == ContextType.Machine); return (BindSam(_serverName, userName, password)); } } public bool Validate(string userName, string password, ContextOptions connectionMethod) { // empty username and password on the local box // causes authentication to succeed. If the username is empty we should just fail it // here. 
if (userName != null && userName.Length == 0) return false; if (_contextType == ContextType.Domain || _contextType == ContextType.ApplicationDirectory) { try { NetworkCredential networkCredential = new NetworkCredential(userName, password); BindLdap(networkCredential, connectionMethod); return true; } catch (LdapException ldapex) { if (ldapex.ErrorCode == ExceptionHelper.ERROR_LOGON_FAILURE) { return false; } throw; } } else { return (BindSam(_serverName, userName, password)); } } } // ******************************************** public class PrincipalContext : IDisposable { // // Public Constructors // public PrincipalContext(ContextType contextType) : this(contextType, null, null, PrincipalContext.GetDefaultOptionForStore(contextType), null, null) { } public PrincipalContext(ContextType contextType, string name) : this(contextType, name, null, PrincipalContext.GetDefaultOptionForStore(contextType), null, null) { } public PrincipalContext(ContextType contextType, string name, string container) : this(contextType, name, container, PrincipalContext.GetDefaultOptionForStore(contextType), null, null) { } public PrincipalContext(ContextType contextType, string name, string container, ContextOptions options) : this(contextType, name, container, options, null, null) { } public PrincipalContext(ContextType contextType, string name, string userName, string password) : this(contextType, name, null, PrincipalContext.GetDefaultOptionForStore(contextType), userName, password) { } public PrincipalContext(ContextType contextType, string name, string container, string userName, string password) : this(contextType, name, container, PrincipalContext.GetDefaultOptionForStore(contextType), userName, password) { } public PrincipalContext( ContextType contextType, string name, string container, ContextOptions options, string userName, string password) { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering ctor"); if ((userName == null && password != null) || (userName != null && password == null)) throw new ArgumentException(SR.ContextBadUserPwdCombo); if ((options & ~(ContextOptions.Signing | ContextOptions.Negotiate | ContextOptions.Sealing | ContextOptions.SecureSocketLayer | ContextOptions.SimpleBind | ContextOptions.ServerBind)) != 0) throw new InvalidEnumArgumentException("options", (int)options, typeof(ContextOptions)); if (contextType == ContextType.Machine && ((options & ~ContextOptions.Negotiate) != 0)) { throw new ArgumentException(SR.InvalidContextOptionsForMachine); } if ((contextType == ContextType.Domain || contextType == ContextType.ApplicationDirectory) && (((options & (ContextOptions.Negotiate | ContextOptions.SimpleBind)) == 0) || (((options & (ContextOptions.Negotiate | ContextOptions.SimpleBind)) == ((ContextOptions.Negotiate | ContextOptions.SimpleBind)))))) { throw new ArgumentException(SR.InvalidContextOptionsForAD); } if ((contextType != ContextType.Machine) && (contextType != ContextType.Domain) && (contextType != ContextType.ApplicationDirectory) #if TESTHOOK && (contextType != ContextType.Test) #endif ) { throw new InvalidEnumArgumentException("contextType", (int)contextType, typeof(ContextType)); } if ((contextType == ContextType.Machine) && (container != null)) throw new ArgumentException(SR.ContextNoContainerForMachineCtx); if ((contextType == ContextType.ApplicationDirectory) && ((String.IsNullOrEmpty(container)) || (String.IsNullOrEmpty(name)))) throw new ArgumentException(SR.ContextNoContainerForApplicationDirectoryCtx); _contextType = contextType; _name = 
name; _container = container; _options = options; _username = userName; _password = password; DoServerVerifyAndPropRetrieval(); _credValidate = new CredentialValidator(contextType, name, _serverProperties); } // // Public Properties // public ContextType ContextType { get { CheckDisposed(); return _contextType; } } public string Name { get { CheckDisposed(); return _name; } } public string Container { get { CheckDisposed(); return _container; } } public string UserName { get { CheckDisposed(); return _username; } } public ContextOptions Options { get { CheckDisposed(); return _options; } } public string ConnectedServer { get { CheckDisposed(); Initialize(); // Unless we're not initialized, connectedServer should not be null Debug.Assert(_connectedServer != null || _initialized == false); // connectedServer should never be an empty string Debug.Assert(_connectedServer == null || _connectedServer.Length != 0); return _connectedServer; } } /// <summary> /// Validate the passed credentials against the directory supplied. // This function will use the best determined method to do the evaluation /// </summary> public bool ValidateCredentials(string userName, string password) { CheckDisposed(); if ((userName == null && password != null) || (userName != null && password == null)) throw new ArgumentException(SR.ContextBadUserPwdCombo); #if TESTHOOK if ( contextType == ContextType.Test ) { return true; } #endif return (_credValidate.Validate(userName, password)); } /// <summary> /// Validate the passed credentials against the directory supplied. // The supplied options will determine the directory method for credential validation. /// </summary> public bool ValidateCredentials(string userName, string password, ContextOptions options) { // Perform credential validation using fast concurrent bind... 
CheckDisposed(); if ((userName == null && password != null) || (userName != null && password == null)) throw new ArgumentException(SR.ContextBadUserPwdCombo); if (options != ContextOptions.Negotiate && _contextType == ContextType.Machine) throw new ArgumentException(SR.ContextOptionsNotValidForMachineStore); #if TESTHOOK if ( contextType == ContextType.Test ) { return true; } #endif return (_credValidate.Validate(userName, password, options)); } // // Private methods for initialization // private void Initialize() { if (!_initialized) { lock (_initializationLock) { if (_initialized) return; GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Initializing Context"); switch (_contextType) { case ContextType.Domain: DoDomainInit(); break; case ContextType.Machine: DoMachineInit(); break; case ContextType.ApplicationDirectory: DoApplicationDirectoryInit(); break; #if TESTHOOK case ContextType.Test: // do nothing break; #endif default: // Internal error Debug.Fail("PrincipalContext.Initialize: fell off end looking for " + _contextType.ToString()); break; } _initialized = true; } } } private void DoApplicationDirectoryInit() { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering DoApplicationDirecotryInit"); Debug.Assert(_contextType == ContextType.ApplicationDirectory); if (_container == null) { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoApplicationDirecotryInit: using no-container path"); DoLDAPDirectoryInitNoContainer(); } else { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoApplicationDirecotryInit: using container path"); DoLDAPDirectoryInit(); } } private void DoMachineInit() { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering DoMachineInit"); Debug.Assert(_contextType == ContextType.Machine); Debug.Assert(_container == null); DirectoryEntry de = null; try { string hostname = _name; if (hostname == null) hostname = Utils.GetComputerFlatName(); GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoMachineInit: hostname is " + hostname); // use the options they specified AuthenticationTypes authTypes = SDSUtils.MapOptionsToAuthTypes(_options); GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoMachineInit: authTypes is " + authTypes.ToString()); de = new DirectoryEntry("WinNT://" + hostname + ",computer", _username, _password, authTypes); // Force ADSI to connect so we detect if the server is down or if the servername is invalid de.RefreshCache(); StoreCtx storeCtx = CreateContextFromDirectoryEntry(de); _queryCtx = storeCtx; _userCtx = storeCtx; _groupCtx = storeCtx; _computerCtx = storeCtx; _connectedServer = hostname; de = null; } catch (Exception e) { GlobalDebug.WriteLineIf(GlobalDebug.Error, "PrincipalContext", "DoMachineInit: caught exception of type " + e.GetType().ToString() + " and message " + e.Message); // Cleanup the DE on failure if (de != null) de.Dispose(); throw; } } private void DoDomainInit() { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering DoDomainInit"); Debug.Assert(_contextType == ContextType.Domain); if (_container == null) { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoDomainInit: using no-container path"); DoLDAPDirectoryInitNoContainer(); return; } else { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoDomainInit: using container path"); DoLDAPDirectoryInit(); return; } } private void DoServerVerifyAndPropRetrieval() { _serverProperties = new ServerProperties(); if (_contextType == 
ContextType.ApplicationDirectory || _contextType == ContextType.Domain) { ReadServerConfig(_name, ref _serverProperties); if (_serverProperties.contextType != _contextType) { throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, SR.PassedContextTypeDoesNotMatchDetectedType, _serverProperties.contextType.ToString())); } } } private void DoLDAPDirectoryInit() { // use the servername if they gave us one, else let ADSI figure it out string serverName = ""; if (_name != null) { if (_contextType == ContextType.ApplicationDirectory) { serverName = _serverProperties.dnsHostName + ":" + ((ContextOptions.SecureSocketLayer & _options) > 0 ? _serverProperties.portSSL : _serverProperties.portLDAP); } else { serverName = _name; } serverName += "/"; } GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInit: serverName is " + serverName); // use the options they specified AuthenticationTypes authTypes = SDSUtils.MapOptionsToAuthTypes(_options); GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInit: authTypes is " + authTypes.ToString()); DirectoryEntry de = new DirectoryEntry("LDAP://" + serverName + _container, _username, _password, authTypes); try { // Set the password port to the ssl port read off of the rootDSE. Without this // password change/set won't work when we connect without SSL and ADAM is running // on non-standard port numbers. We have already verified directory connectivity at this point // so this should always succeed. if (_serverProperties.portSSL > 0) { de.Options.PasswordPort = _serverProperties.portSSL; } StoreCtx storeCtx = CreateContextFromDirectoryEntry(de); _queryCtx = storeCtx; _userCtx = storeCtx; _groupCtx = storeCtx; _computerCtx = storeCtx; _connectedServer = ADUtils.GetServerName(de); de = null; } catch (System.Runtime.InteropServices.COMException e) { throw ExceptionHelper.GetExceptionFromCOMException(e); } catch (Exception e) { GlobalDebug.WriteLineIf(GlobalDebug.Error, "PrincipalContext", "DoLDAPDirectoryInit: caught exception of type " + e.GetType().ToString() + " and message " + e.Message); throw; } finally { // Cleanup the DE on failure if (de != null) de.Dispose(); } } private void DoLDAPDirectoryInitNoContainer() { byte[] USERS_CONTAINER_GUID = new byte[] { 0xa9, 0xd1, 0xca, 0x15, 0x76, 0x88, 0x11, 0xd1, 0xad, 0xed, 0x00, 0xc0, 0x4f, 0xd8, 0xd5, 0xcd }; byte[] COMPUTERS_CONTAINER_GUID = new byte[] { 0xaa, 0x31, 0x28, 0x25, 0x76, 0x88, 0x11, 0xd1, 0xad, 0xed, 0x00, 0xc0, 0x4f, 0xd8, 0xd5, 0xcd }; // The StoreCtxs that will be used in the PrincipalContext, and their associated DirectoryEntry objects. DirectoryEntry deUserGroupOrg = null; DirectoryEntry deComputer = null; DirectoryEntry deBase = null; ADStoreCtx storeCtxUserGroupOrg = null; ADStoreCtx storeCtxComputer = null; ADStoreCtx storeCtxBase = null; GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering DoLDAPDirectoryInitNoContainer"); // // Build a DirectoryEntry that represents the root of the domain. 
// // Use the RootDSE to find the default naming context DirectoryEntry deRootDse = null; string adsPathBase; // use the servername if they gave us one, else let ADSI figure it out string serverName = ""; if (_name != null) { serverName = _name + "/"; } GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: serverName is " + serverName); // use the options they specified AuthenticationTypes authTypes = SDSUtils.MapOptionsToAuthTypes(_options); GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: authTypes is " + authTypes.ToString()); try { deRootDse = new DirectoryEntry("LDAP://" + serverName + "rootDse", _username, _password, authTypes); // This will also detect if the server is down or nonexistent string domainNC = (string)deRootDse.Properties["defaultNamingContext"][0]; adsPathBase = "LDAP://" + serverName + domainNC; GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: domainNC is " + domainNC); GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: adsPathBase is " + adsPathBase); } finally { // Don't allow the DE to leak if (deRootDse != null) deRootDse.Dispose(); } try { // Build a DE for the root of the domain using the retrieved naming context deBase = new DirectoryEntry(adsPathBase, _username, _password, authTypes); // Set the password port to the ssl port read off of the rootDSE. Without this // password change/set won't work when we connect without SSL and ADAM is running // on non-standard port numbers. We have already verified directory connectivity at this point // so this should always succeed. if (_serverProperties.portSSL > 0) { deBase.Options.PasswordPort = _serverProperties.portSSL; } // // Use the wellKnownObjects attribute to determine the default location // for users and computers. // string adsPathUserGroupOrg = null; string adsPathComputer = null; PropertyValueCollection wellKnownObjectValues = deBase.Properties["wellKnownObjects"]; foreach (UnsafeNativeMethods.IADsDNWithBinary value in wellKnownObjectValues) { if (Utils.AreBytesEqual(USERS_CONTAINER_GUID, (byte[])value.BinaryValue)) { Debug.Assert(adsPathUserGroupOrg == null); adsPathUserGroupOrg = "LDAP://" + serverName + value.DNString; GlobalDebug.WriteLineIf( GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: found USER, adsPathUserGroupOrg is " + adsPathUserGroupOrg); } // Is it the computer container? if (Utils.AreBytesEqual(COMPUTERS_CONTAINER_GUID, (byte[])value.BinaryValue)) { Debug.Assert(adsPathComputer == null); adsPathComputer = "LDAP://" + serverName + value.DNString; GlobalDebug.WriteLineIf( GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: found COMPUTER, adsPathComputer is " + adsPathComputer); } } if ((adsPathUserGroupOrg == null) || (adsPathComputer == null)) { // Something's wrong with the domain, it's not exposing the proper // well-known object fields. throw new PrincipalOperationException(SR.ContextNoWellKnownObjects); } // // Build DEs for the Users and Computers containers. // The Users container will also be used as the default for Groups. // The reason there are different contexts for groups, users and computers is so that // when a principal is created it will go into the appropriate default container. This is so users don't // be default create principals in the root of their directory. When a search happens the base context is used so that // the whole directory will be covered. 
// deUserGroupOrg = new DirectoryEntry(adsPathUserGroupOrg, _username, _password, authTypes); deComputer = new DirectoryEntry(adsPathComputer, _username, _password, authTypes); StoreCtx userStore = CreateContextFromDirectoryEntry(deUserGroupOrg); _userCtx = userStore; _groupCtx = userStore; deUserGroupOrg = null; // since we handed off ownership to the StoreCtx _computerCtx = CreateContextFromDirectoryEntry(deComputer); deComputer = null; _queryCtx = CreateContextFromDirectoryEntry(deBase); _connectedServer = ADUtils.GetServerName(deBase); deBase = null; } catch (Exception e) { GlobalDebug.WriteLineIf(GlobalDebug.Error, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: caught exception of type " + e.GetType().ToString() + " and message " + e.Message); // Cleanup on failure. Once a DE has been successfully handed off to a ADStoreCtx, // that ADStoreCtx will handle Dispose()'ing it if (deUserGroupOrg != null) deUserGroupOrg.Dispose(); if (deComputer != null) deComputer.Dispose(); if (deBase != null) deBase.Dispose(); if (storeCtxUserGroupOrg != null) storeCtxUserGroupOrg.Dispose(); if (storeCtxComputer != null) storeCtxComputer.Dispose(); if (storeCtxBase != null) storeCtxBase.Dispose(); throw; } } #if TESTHOOK static public PrincipalContext Test { get { StoreCtx storeCtx = new TestStoreCtx(true); PrincipalContext ctx = new PrincipalContext(ContextType.Test); ctx.SetupContext(storeCtx); ctx.initialized = true; storeCtx.OwningContext = ctx; return ctx; } } static public PrincipalContext TestAltValidation { get { TestStoreCtx storeCtx = new TestStoreCtx(true); storeCtx.SwitchValidationMode = true; PrincipalContext ctx = new PrincipalContext(ContextType.Test); ctx.SetupContext(storeCtx); ctx.initialized = true; storeCtx.OwningContext = ctx; return ctx; } } static public PrincipalContext TestNoTimeLimited { get { TestStoreCtx storeCtx = new TestStoreCtx(true); storeCtx.SupportTimeLimited = false; PrincipalContext ctx = new PrincipalContext(ContextType.Test); ctx.SetupContext(storeCtx); ctx.initialized = true; storeCtx.OwningContext = ctx; return ctx; } } #endif // TESTHOOK // // Public Methods // public void Dispose() { if (!_disposed) { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Dispose: disposing"); // Note that we may end up calling Dispose multiple times on the same // StoreCtx (since, for example, it might be that userCtx == groupCtx). // This is okay, since StoreCtxs allow multiple Dispose() calls, and ignore // all but the first call. if (_userCtx != null) _userCtx.Dispose(); if (_groupCtx != null) _groupCtx.Dispose(); if (_computerCtx != null) _computerCtx.Dispose(); if (_queryCtx != null) _queryCtx.Dispose(); _disposed = true; GC.SuppressFinalize(this); } } // // Private Implementation // // Are we initialized? private bool _initialized = false; private object _initializationLock = new object(); // Have we been disposed? private bool _disposed = false; internal bool Disposed { get { return _disposed; } } // Our constructor parameters // encryption nor zeroing out the string when you're done with it. 
private string _username; private string _password; // Cached connections to the server for fast credential validation private CredentialValidator _credValidate; private ServerProperties _serverProperties; internal ServerProperties ServerInformation { get { return _serverProperties; } } private string _name; private string _container; private ContextOptions _options; private ContextType _contextType; // The server we're connected to private string _connectedServer = null; // The reason there are different contexts for groups, users and computers is so that // when a principal is created it will go into the appropriate default container. This is so users don't // by default create principals in the root of their directory. When a search happens the base context is used so that // the whole directory will be covered. User and Computers default are the same ( USERS container ), Computers are // put under COMPUTERS container. If a container is specified then all the contexts will point to the same place. // The StoreCtx to be used when inserting a new User/Computer/Group Principal into this // PrincipalContext. private StoreCtx _userCtx = null; private StoreCtx _computerCtx = null; private StoreCtx _groupCtx = null; // The StoreCtx to be used when querying against this PrincipalContext for Principals private StoreCtx _queryCtx = null; internal StoreCtx QueryCtx { get { Initialize(); return _queryCtx; } set { _queryCtx = value; } } internal void ReadServerConfig(string serverName, ref ServerProperties properties) { string[] proplist = new string[] { "msDS-PortSSL", "msDS-PortLDAP", "domainControllerFunctionality", "dnsHostName", "supportedCapabilities" }; LdapConnection ldapConnection = null; try { bool useSSL = (_options & ContextOptions.SecureSocketLayer) > 0; if (useSSL && _contextType == ContextType.Domain) { LdapDirectoryIdentifier directoryid = new LdapDirectoryIdentifier(serverName, LdapConstants.LDAP_SSL_PORT); ldapConnection = new LdapConnection(directoryid); } else { ldapConnection = new LdapConnection(serverName); } ldapConnection.AutoBind = false; // If SSL was enabled on the initial connection then turn it on for the search. // This is requried bc the appended port number will be SSL and we don't know what port LDAP is running on. ldapConnection.SessionOptions.SecureSocketLayer = useSSL; string baseDN = null; // specify base as null for RootDSE search string ldapSearchFilter = "(objectClass=*)"; SearchResponse searchResponse = null; SearchRequest searchRequest = new SearchRequest(baseDN, ldapSearchFilter, System.DirectoryServices.Protocols .SearchScope.Base, proplist); try { searchResponse = (SearchResponse)ldapConnection.SendRequest(searchRequest); } catch (LdapException ex) { throw new PrincipalServerDownException(SR.ServerDown, ex); } // Fill in the struct with the casted properties from the serach results. 
// there will always be only 1 item on the rootDSE so all entry indexes are 0 properties.dnsHostName = (string)searchResponse.Entries[0].Attributes["dnsHostName"][0]; properties.SupportCapabilities = new string[searchResponse.Entries[0].Attributes["supportedCapabilities"].Count]; for (int i = 0; i < searchResponse.Entries[0].Attributes["supportedCapabilities"].Count; i++) { properties.SupportCapabilities[i] = (string)searchResponse.Entries[0].Attributes["supportedCapabilities"][i]; } foreach (string capability in properties.SupportCapabilities) { if (CapabilityMap.LDAP_CAP_ACTIVE_DIRECTORY_ADAM_OID == capability) { properties.contextType = ContextType.ApplicationDirectory; } else if (CapabilityMap.LDAP_CAP_ACTIVE_DIRECTORY_OID == capability) { properties.contextType = ContextType.Domain; } } // If we can't determine the OS vesion so we must fall back to lowest level of functionality if (searchResponse.Entries[0].Attributes.Contains("domainControllerFunctionality")) { properties.OsVersion = (DomainControllerMode)Convert.ToInt32(searchResponse.Entries[0].Attributes["domainControllerFunctionality"][0], CultureInfo.InvariantCulture); } else { properties.OsVersion = DomainControllerMode.Win2k; } if (properties.contextType == ContextType.ApplicationDirectory) { if (searchResponse.Entries[0].Attributes.Contains("msDS-PortSSL")) { properties.portSSL = Convert.ToInt32(searchResponse.Entries[0].Attributes["msDS-PortSSL"][0]); } if (searchResponse.Entries[0].Attributes.Contains("msDS-PortLDAP")) { properties.portLDAP = Convert.ToInt32(searchResponse.Entries[0].Attributes["msDS-PortLDAP"][0]); } } GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "OsVersion : " + properties.OsVersion.ToString()); GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "dnsHostName : " + properties.dnsHostName); GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "contextType : " + properties.contextType.ToString()); GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "portSSL : " + properties.portSSL.ToString(CultureInfo.InvariantCulture)); GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "portLDAP :" + properties.portLDAP.ToString(CultureInfo.InvariantCulture)); } finally { if (ldapConnection != null) { ldapConnection.Dispose(); } } } private StoreCtx CreateContextFromDirectoryEntry(DirectoryEntry entry) { StoreCtx storeCtx; Debug.Assert(entry != null); GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "CreateContextFromDirectoryEntry: path is " + entry.Path); if (entry.Path.StartsWith("LDAP:", StringComparison.Ordinal)) { if (this.ContextType == ContextType.ApplicationDirectory) { storeCtx = new ADAMStoreCtx(entry, true, _username, _password, _name, _options); } else { storeCtx = new ADStoreCtx(entry, true, _username, _password, _options); } } else { Debug.Assert(entry.Path.StartsWith("WinNT:", StringComparison.Ordinal)); storeCtx = new SAMStoreCtx(entry, true, _username, _password, _options); } storeCtx.OwningContext = this; return storeCtx; } // Checks if we're already been disposed, and throws an appropriate // exception if so. internal void CheckDisposed() { if (_disposed) { GlobalDebug.WriteLineIf(GlobalDebug.Warn, "PrincipalContext", "CheckDisposed: accessing disposed object"); throw new ObjectDisposedException("PrincipalContext"); } } // Match the default context options to the store type. 
private static ContextOptions GetDefaultOptionForStore(ContextType storeType) { if (storeType == ContextType.Machine) { return DefaultContextOptions.MachineDefaultContextOption; } else { return DefaultContextOptions.ADDefaultContextOption; } } // Helper method: given a typeof(User/Computer/etc.), returns the userCtx/computerCtx/etc. internal StoreCtx ContextForType(Type t) { GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "ContextForType: type is " + t.ToString()); Initialize(); if (t == typeof(System.DirectoryServices.AccountManagement.UserPrincipal) || t.IsSubclassOf(typeof(System.DirectoryServices.AccountManagement.UserPrincipal))) { return _userCtx; } else if (t == typeof(System.DirectoryServices.AccountManagement.ComputerPrincipal) || t.IsSubclassOf(typeof(System.DirectoryServices.AccountManagement.ComputerPrincipal))) { return _computerCtx; } else if (t == typeof(System.DirectoryServices.AccountManagement.AuthenticablePrincipal) || t.IsSubclassOf(typeof(System.DirectoryServices.AccountManagement.AuthenticablePrincipal))) { return _userCtx; } else { Debug.Assert(t == typeof(System.DirectoryServices.AccountManagement.GroupPrincipal) || t.IsSubclassOf(typeof(System.DirectoryServices.AccountManagement.GroupPrincipal))); return _groupCtx; } } } }
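// --- Usage sketch (not part of the original file) ---------------------------
// A minimal example of validating credentials with the PrincipalContext
// defined above. The user name and password are placeholders supplied by the
// caller; no domain or container name is passed, so the default store is
// resolved by the context itself.
using System.DirectoryServices.AccountManagement;

static class ValidateCredentialsSketch
{
    public static bool CheckDomainLogon(string user, string password)
    {
        // For ContextType.Domain, CredentialValidator alternates between a
        // simple (SSL) bind and a Negotiate bind, remembering which one
        // succeeded last.
        using (var ctx = new PrincipalContext(ContextType.Domain))
        {
            return ctx.ValidateCredentials(user, password);
        }
    }

    public static bool CheckLocalMachineLogon(string user, string password)
    {
        // For the Machine store, validation goes through BindSam against the
        // local SAM; only Negotiate is a valid ContextOptions value here.
        using (var ctx = new PrincipalContext(ContextType.Machine))
        {
            return ctx.ValidateCredentials(user, password);
        }
    }
}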
using System; using System.Runtime.InteropServices; using System.Security; using SFML.Window; using SFML.System; namespace SFML { namespace Graphics { //////////////////////////////////////////////////////////// /// <summary> /// Base class for textured shapes with outline /// </summary> //////////////////////////////////////////////////////////// public abstract class Shape : Transformable, Drawable { //////////////////////////////////////////////////////////// /// <summary> /// Source texture of the shape /// </summary> //////////////////////////////////////////////////////////// public Texture Texture { get { return myTexture; } set { myTexture = value; sfShape_setTexture(CPointer, value != null ? value.CPointer : IntPtr.Zero, false); } } //////////////////////////////////////////////////////////// /// <summary> /// Sub-rectangle of the texture that the shape will display /// </summary> //////////////////////////////////////////////////////////// public IntRect TextureRect { get { return sfShape_getTextureRect(CPointer); } set { sfShape_setTextureRect(CPointer, value); } } //////////////////////////////////////////////////////////// /// <summary> /// Fill color of the shape /// </summary> //////////////////////////////////////////////////////////// public Color FillColor { get { return sfShape_getFillColor(CPointer); } set { sfShape_setFillColor(CPointer, value); } } //////////////////////////////////////////////////////////// /// <summary> /// Outline color of the shape /// </summary> //////////////////////////////////////////////////////////// public Color OutlineColor { get { return sfShape_getOutlineColor(CPointer); } set { sfShape_setOutlineColor(CPointer, value); } } //////////////////////////////////////////////////////////// /// <summary> /// Thickness of the shape's outline /// </summary> //////////////////////////////////////////////////////////// public float OutlineThickness { get { return sfShape_getOutlineThickness(CPointer); } set { sfShape_setOutlineThickness(CPointer, value); } } //////////////////////////////////////////////////////////// /// <summary> /// Get the total number of points of the shape /// </summary> /// <returns>The total point count</returns> //////////////////////////////////////////////////////////// public abstract uint GetPointCount(); //////////////////////////////////////////////////////////// /// <summary> /// Get the position of a point /// /// The returned point is in local coordinates, that is, /// the shape's transforms (position, rotation, scale) are /// not taken into account. /// The result is undefined if index is out of the valid range. /// </summary> /// <param name="index">Index of the point to get, in range [0 .. PointCount - 1]</param> /// <returns>index-th point of the shape</returns> //////////////////////////////////////////////////////////// public abstract Vector2f GetPoint(uint index); //////////////////////////////////////////////////////////// /// <summary> /// Get the local bounding rectangle of the entity. /// /// The returned rectangle is in local coordinates, which means /// that it ignores the transformations (translation, rotation, /// scale, ...) that are applied to the entity. /// In other words, this function returns the bounds of the /// entity in the entity's coordinate system. 
/// </summary> /// <returns>Local bounding rectangle of the entity</returns> //////////////////////////////////////////////////////////// public FloatRect GetLocalBounds() { return sfShape_getLocalBounds(CPointer); } //////////////////////////////////////////////////////////// /// <summary> /// Get the global bounding rectangle of the entity. /// /// The returned rectangle is in global coordinates, which means /// that it takes in account the transformations (translation, /// rotation, scale, ...) that are applied to the entity. /// In other words, this function returns the bounds of the /// sprite in the global 2D world's coordinate system. /// </summary> /// <returns>Global bounding rectangle of the entity</returns> //////////////////////////////////////////////////////////// public FloatRect GetGlobalBounds() { // we don't use the native getGlobalBounds function, // because we override the object's transform return Transform.TransformRect(GetLocalBounds()); } //////////////////////////////////////////////////////////// /// <summmary> /// Draw the shape to a render target /// </summmary> /// <param name="target">Render target to draw to</param> /// <param name="states">Current render states</param> //////////////////////////////////////////////////////////// public void Draw(RenderTarget target, RenderStates states) { states.Transform *= Transform; RenderStates.MarshalData marshaledStates = states.Marshal(); if (target is RenderWindow) { sfRenderWindow_drawShape(((RenderWindow)target).CPointer, CPointer, ref marshaledStates); } else if (target is RenderTexture) { sfRenderTexture_drawShape(((RenderTexture)target).CPointer, CPointer, ref marshaledStates); } } //////////////////////////////////////////////////////////// /// <summary> /// Default constructor /// </summary> //////////////////////////////////////////////////////////// protected Shape() : base(IntPtr.Zero) { myGetPointCountCallback = new GetPointCountCallbackType(InternalGetPointCount); myGetPointCallback = new GetPointCallbackType(InternalGetPoint); CPointer = sfShape_create(myGetPointCountCallback, myGetPointCallback, IntPtr.Zero); } //////////////////////////////////////////////////////////// /// <summary> /// Construct the shape from another shape /// </summary> /// <param name="copy">Shape to copy</param> //////////////////////////////////////////////////////////// public Shape(Shape copy) : base(IntPtr.Zero) { myGetPointCountCallback = new GetPointCountCallbackType(InternalGetPointCount); myGetPointCallback = new GetPointCallbackType(InternalGetPoint); CPointer = sfShape_create(myGetPointCountCallback, myGetPointCallback, IntPtr.Zero); Origin = copy.Origin; Position = copy.Position; Rotation = copy.Rotation; Scale = copy.Scale; Texture = copy.Texture; TextureRect = copy.TextureRect; FillColor = copy.FillColor; OutlineColor = copy.OutlineColor; OutlineThickness = copy.OutlineThickness; } //////////////////////////////////////////////////////////// /// <summary> /// Recompute the internal geometry of the shape. /// /// This function must be called by the derived class everytime /// the shape's points change (ie. the result of either /// PointCount or GetPoint is different). 
/// </summary> //////////////////////////////////////////////////////////// protected void Update() { sfShape_update(CPointer); } //////////////////////////////////////////////////////////// /// <summary> /// Handle the destruction of the object /// </summary> /// <param name="disposing">Is the GC disposing the object, or is it an explicit call ?</param> //////////////////////////////////////////////////////////// protected override void Destroy(bool disposing) { sfShape_destroy(CPointer); } //////////////////////////////////////////////////////////// /// <summary> /// Callback passed to the C API /// </summary> //////////////////////////////////////////////////////////// private uint InternalGetPointCount(IntPtr userData) { return GetPointCount(); } //////////////////////////////////////////////////////////// /// <summary> /// Callback passed to the C API /// </summary> //////////////////////////////////////////////////////////// private Vector2f InternalGetPoint(uint index, IntPtr userData) { return GetPoint(index); } [UnmanagedFunctionPointer(CallingConvention.Cdecl)] private delegate uint GetPointCountCallbackType(IntPtr UserData); [UnmanagedFunctionPointer(CallingConvention.Cdecl)] private delegate Vector2f GetPointCallbackType(uint index, IntPtr UserData); private GetPointCountCallbackType myGetPointCountCallback; private GetPointCallbackType myGetPointCallback; private Texture myTexture = null; #region Imports [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern IntPtr sfShape_create(GetPointCountCallbackType getPointCount, GetPointCallbackType getPoint, IntPtr userData); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern IntPtr sfShape_copy(IntPtr Shape); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfShape_destroy(IntPtr CPointer); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfShape_setTexture(IntPtr CPointer, IntPtr Texture, bool AdjustToNewSize); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfShape_setTextureRect(IntPtr CPointer, IntRect Rect); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern IntRect sfShape_getTextureRect(IntPtr CPointer); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfShape_setFillColor(IntPtr CPointer, Color Color); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern Color sfShape_getFillColor(IntPtr CPointer); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfShape_setOutlineColor(IntPtr CPointer, Color Color); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern Color sfShape_getOutlineColor(IntPtr CPointer); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfShape_setOutlineThickness(IntPtr CPointer, float Thickness); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static 
extern float sfShape_getOutlineThickness(IntPtr CPointer); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern FloatRect sfShape_getLocalBounds(IntPtr CPointer); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfShape_update(IntPtr CPointer); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfRenderWindow_drawShape(IntPtr CPointer, IntPtr Shape, ref RenderStates.MarshalData states); [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] static extern void sfRenderTexture_drawShape(IntPtr CPointer, IntPtr Shape, ref RenderStates.MarshalData states); #endregion } } }
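// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the SFML.Net sources above): one way a
// concrete class can derive from the abstract Shape. The EllipseShape name and
// its members are hypothetical; the sketch only relies on the abstract members
// (GetPointCount, GetPoint) and the protected Update() documented above.
// ---------------------------------------------------------------------------
using System;
using SFML.Graphics;
using SFML.System;

namespace SFML.Examples
{
    /// <summary>Ellipse approximated by a fixed number of points</summary>
    public class EllipseShape : Shape
    {
        private const uint PointCount = 30;
        private Vector2f myRadius;

        public EllipseShape(Vector2f radius)
        {
            myRadius = radius;
            Update(); // build the initial geometry
        }

        public Vector2f Radius
        {
            get { return myRadius; }
            set { myRadius = value; Update(); } // points changed, so recompute
        }

        /// <summary>Total number of points used to approximate the ellipse</summary>
        public override uint GetPointCount()
        {
            return PointCount;
        }

        /// <summary>index-th point of the ellipse, in local coordinates</summary>
        public override Vector2f GetPoint(uint index)
        {
            float angle = (float)(index * 2 * Math.PI / PointCount - Math.PI / 2);
            float x = (float)Math.Cos(angle) * myRadius.X;
            float y = (float)Math.Sin(angle) * myRadius.Y;
            return new Vector2f(myRadius.X + x, myRadius.Y + y);
        }
    }
}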
// Copyright (c) 2006-2007 Frank Laub // All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // 3. The name of the author may not be used to endorse or promote products // derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR // IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES // OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT // NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF // THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. using System; using System.Collections.Generic; using System.Text; using System.IO; using OpenSSL.Crypto; namespace OpenSSL.Core { /// <summary> /// Encapsulates the BIO_* functions. /// </summary> public class BIO : Base { #region Initialization internal BIO(IntPtr ptr, bool owner) : base(ptr, owner) { } /// <summary> /// Calls BIO_new(BIO_s_mem()) and then BIO_write() the buf /// </summary> /// <param name="buf"></param> public BIO(byte[] buf) : base(Native.ExpectNonNull(Native.BIO_new(Native.BIO_s_mem())), true) { this.Write(buf); } /// <summary> /// Calls BIO_new(BIO_s_mem()) and then BIO_write() the str /// </summary> /// <param name="str"></param> public BIO(string str) : this(Encoding.ASCII.GetBytes(str)) { } /// <summary> /// Calls BIO_new(BIO_s_mem()) /// </summary> /// <param name="takeOwnership"></param> /// <returns></returns> public static BIO MemoryBuffer(bool takeOwnership) { IntPtr ptr = Native.ExpectNonNull(Native.BIO_new(Native.BIO_s_mem())); return new BIO(ptr, takeOwnership); } /// <summary> /// Factory method that calls BIO_new() with BIO_s_mem() /// </summary> /// <returns></returns> public static BIO MemoryBuffer() { return MemoryBuffer(true); } /// <summary> /// Factory method that calls BIO_new_file() /// </summary> /// <param name="filename"></param> /// <param name="mode"></param> /// <returns></returns> public static BIO File(string filename, string mode) { IntPtr ptr = Native.ExpectNonNull(Native.BIO_new_file(filename, mode)); return new BIO(ptr, true); } private const int FD_STDIN = 0; private const int FD_STDOUT = 1; private const int FD_STDERR = 2; /// <summary> /// Factory method that calls BIO_new() with BIO_f_md() /// </summary> /// <param name="md"></param> /// <returns></returns> public static BIO MessageDigest(MessageDigest md) { IntPtr ptr = Native.ExpectNonNull(Native.BIO_new(Native.BIO_f_md())); Native.BIO_set_md(ptr, md.Handle); return new BIO(ptr, true); } //public static BIO MessageDigestContext(MessageDigestContext ctx) //{ // IntPtr ptr = Native.ExpectNonNull(Native.BIO_new(Native.BIO_f_md())); // //IntPtr ptr = 
Native.ExpectNonNull(Native.BIO_new(Native.BIO_f_null())); // Native.BIO_set_md_ctx(ptr, ctx.Handle); // return new BIO(ptr); //} #endregion #region Properties /// <summary> /// Returns BIO_number_read() /// </summary> public uint NumberRead { get { return Native.BIO_number_read(this.Handle); } } /// <summary> /// Returns BIO_number_written() /// </summary> public uint NumberWritten { get { return Native.BIO_number_written(this.Handle); } } /// <summary> /// Returns number of bytes buffered in the BIO - calls BIO_ctrl_pending /// </summary> public uint BytesPending { get { return Native.BIO_ctrl_pending(this.Handle); } } #endregion #region Methods /// <summary> /// BIO Close Options /// </summary> public enum CloseOption { /// <summary> /// Don't close on free /// </summary> NoClose = 0, /// <summary> /// Close on freee /// </summary> Close = 1 } /// <summary> /// Calls BIO_set_close() /// </summary> /// <param name="opt"></param> public void SetClose(CloseOption opt) { Native.BIO_set_close(this.ptr, (int)opt); } /// <summary> /// Calls BIO_push() /// </summary> /// <param name="bio"></param> public void Push(BIO bio) { Native.ExpectNonNull(Native.BIO_push(this.ptr, bio.Handle)); } /// <summary> /// Calls BIO_write() /// </summary> /// <param name="buf"></param> public void Write(byte[] buf) { if (Native.BIO_write(this.ptr, buf, buf.Length) != buf.Length) throw new OpenSslException(); } /// <summary> /// Calls BIO_write() /// </summary> /// <param name="buf"></param> /// <param name="len"></param> public void Write(byte[] buf, int len) { if (Native.BIO_write(this.ptr, buf, len) != len) throw new OpenSslException(); } /// <summary> /// Calls BIO_write() /// </summary> /// <param name="value"></param> public void Write(byte value) { byte[] buf = new byte[1]; buf[0] = value; Write(buf); } /// <summary> /// Calls BIO_write() /// </summary> /// <param name="value"></param> public void Write(ushort value) { MemoryStream ms = new MemoryStream(); BinaryWriter br = new BinaryWriter(ms); br.Write(value); byte[] buf = ms.ToArray(); Write(buf); } /// <summary> /// Calls BIO_write() /// </summary> /// <param name="value"></param> public void Write(uint value) { MemoryStream ms = new MemoryStream(); BinaryWriter br = new BinaryWriter(ms); br.Write(value); byte[] buf = ms.ToArray(); Write(buf); } /// <summary> /// Calls BIO_puts() /// </summary> /// <param name="str"></param> public void Write(string str) { byte[] buf = Encoding.ASCII.GetBytes(str); if (Native.BIO_puts(this.ptr, buf) != buf.Length) throw new OpenSslException(); } /// <summary> /// Calls BIO_read() /// </summary> /// <param name="count"></param> /// <returns></returns> public ArraySegment<byte> ReadBytes(int count) { byte[] buf = new byte[count]; int ret = Native.BIO_read(this.ptr, buf, buf.Length); if (ret < 0) throw new Exception("Expected " + count + " bytes but received " + ret); return new ArraySegment<byte>(buf, 0, ret); } /// <summary> /// Calls BIO_gets() /// </summary> /// <returns></returns> public string ReadString() { StringBuilder sb = new StringBuilder(); const int BLOCK_SIZE = 64; byte[] buf = new byte[BLOCK_SIZE]; int ret = 0; while (true) { ret = Native.BIO_gets(this.ptr, buf, buf.Length); if (ret == 0) break; if (ret < 0) throw new OpenSslException(); sb.Append(Encoding.ASCII.GetString(buf, 0, ret)); } return sb.ToString(); } /// <summary> /// Returns the MessageDigestContext if this BIO's type if BIO_f_md() /// </summary> /// <returns></returns> public MessageDigestContext GetMessageDigestContext() { return new 
MessageDigestContext(this); } #endregion #region Overrides /// <summary> /// Calls BIO_free() /// </summary> protected override void OnDispose() { Native.BIO_free(this.ptr); } #endregion } }
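// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the OpenSSL.NET sources above): a minimal
// round trip through the memory BIO wrapper. It assumes the native OpenSSL
// libraries are loadable and that Base (the BIO base class) implements
// IDisposable via the OnDispose() override shown above.
// ---------------------------------------------------------------------------
using System;
using System.Text;
using OpenSSL.Core;

static class BioRoundTrip
{
    static void Main()
    {
        // MemoryBuffer() wraps BIO_new(BIO_s_mem()) and takes ownership of the handle
        using (BIO bio = BIO.MemoryBuffer())
        {
            bio.Write("hello, BIO");                                  // BIO_puts
            uint pending = bio.BytesPending;                          // BIO_ctrl_pending
            ArraySegment<byte> data = bio.ReadBytes((int)pending);    // BIO_read
            Console.WriteLine(Encoding.ASCII.GetString(data.Array, data.Offset, data.Count));
        }
    }
}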
// Python Tools for Visual Studio // Copyright(c) Microsoft Corporation // All rights reserved. // // Licensed under the Apache License, Version 2.0 (the License); you may not use // this file except in compliance with the License. You may obtain a copy of the // License at http://www.apache.org/licenses/LICENSE-2.0 // // THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS // OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY // IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, // MERCHANTABLITY OR NON-INFRINGEMENT. // // See the Apache Version 2.0 License for specific language governing // permissions and limitations under the License. using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.IO.Compression; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.PythonTools.Analysis; using Microsoft.PythonTools.Infrastructure; using Microsoft.PythonTools.Interpreter.Ast; namespace Microsoft.PythonTools.Interpreter.Default { class CPythonInterpreter : IPythonInterpreter { readonly Version _langVersion; private PythonInterpreterFactoryWithDatabase _factory; private PythonTypeDatabase _typeDb, _searchPathDb; private PythonAnalyzer _state; private IReadOnlyList<string> _searchPaths; private Dictionary<string, HashSet<string>> _zipPackageCache; public CPythonInterpreter(PythonInterpreterFactoryWithDatabase factory) { _langVersion = factory.Configuration.Version; _factory = factory; _typeDb = _factory.GetCurrentDatabase(); _factory.NewDatabaseAvailable += OnNewDatabaseAvailable; } private async void OnNewDatabaseAvailable(object sender, EventArgs e) { var factory = _factory; if (factory == null) { // We have been disposed already, so ignore this event return; } _typeDb = factory.GetCurrentDatabase(); _searchPathDb = null; _zipPackageCache = null; ModuleNamesChanged?.Invoke(this, EventArgs.Empty); } #region IPythonInterpreter Members public IPythonType GetBuiltinType(BuiltinTypeId id) { if (id == BuiltinTypeId.Unknown) { return null; } if (_typeDb == null) { throw new KeyNotFoundException(string.Format("{0} ({1})", id, (int)id)); } var name = SharedDatabaseState.GetBuiltinTypeName(id, _typeDb.LanguageVersion); var res = _typeDb.BuiltinModule.GetAnyMember(name) as IPythonType; if (res == null) { throw new KeyNotFoundException(string.Format("{0} ({1})", id, (int)id)); } return res; } public IList<string> GetModuleNames() { var fromDb = (_typeDb?.GetModuleNames()).MaybeEnumerate().ToList(); fromDb.AddRange((_searchPathDb?.GetModuleNames()).MaybeEnumerate()); // TODO: Return list of not-yet-imported modules from search paths? 
return fromDb; } public IPythonModule ImportModule(string name) { var mod = _typeDb?.GetModule(name); if (mod == null) { mod = _searchPathDb?.GetModule(name); if (mod == null) { foreach (var searchPath in _searchPaths.MaybeEnumerate()) { try { if (File.Exists(searchPath)) { mod = LoadModuleFromZipFile(searchPath, name); } else if (Directory.Exists(searchPath)) { mod = LoadModuleFromDirectory(searchPath, name); } } catch (ArgumentException) { return null; } if (mod != null) { break; } } } } return mod; } private void EnsureSearchPathDB() { if (_searchPathDb == null) { _searchPathDb = new PythonTypeDatabase(_factory, innerDatabase: _typeDb); } } private IPythonModule LoadModuleFromDirectory(string searchPath, string moduleName) { Func<string, bool> isPackage = null; if (!ModulePath.PythonVersionRequiresInitPyFiles(_langVersion)) { isPackage = Directory.Exists; } ModulePath package; try { package = ModulePath.FromBasePathAndName(searchPath, moduleName, isPackage); } catch (ArgumentException) { return null; } EnsureSearchPathDB(); if (package.IsNativeExtension || package.IsCompiled) { _searchPathDb.LoadExtensionModule(package); } else { _searchPathDb.AddModule(package.FullName, AstPythonModule.FromFile( this, package.SourceFile, _factory.GetLanguageVersion() )); } var mod = _searchPathDb.GetModule(package.FullName); if (!package.IsSpecialName) { int i = package.FullName.LastIndexOf('.'); if (i >= 1) { var parent = package.FullName.Remove(i); var parentMod = _searchPathDb.GetModule(parent) as AstPythonModule; if (parentMod != null) { parentMod.AddChildModule(package.Name, mod); } } } return mod; } class GetModuleCallable { private readonly HashSet<string> _packages; public GetModuleCallable(HashSet<string> packages) { _packages = packages; } public string GetModule(string basePath, string lastBit) { var candidate = Path.Combine(basePath, lastBit, "__init__.py"); if (_packages.Contains(candidate)) { return candidate; } candidate = Path.Combine(basePath, Path.ChangeExtension(lastBit, ".py")); if (_packages.Contains(candidate)) { return candidate; } candidate = Path.Combine(basePath, Path.ChangeExtension(lastBit, ".pyw")); if (_packages.Contains(candidate)) { return candidate; } return null; } } private IPythonModule LoadModuleFromZipFile(string zipFile, string moduleName) { ModulePath name; HashSet<string> packages = null; var cache = _zipPackageCache; if (cache == null) { cache = _zipPackageCache = new Dictionary<string, HashSet<string>>(); } if (!cache.TryGetValue(zipFile, out packages) || packages == null) { using (var stream = new FileStream(zipFile, FileMode.Open, FileAccess.Read)) using (var zip = new ZipArchive(stream, ZipArchiveMode.Read, true)) { cache[zipFile] = packages = new HashSet<string>( zip.Entries.Select(e => e.FullName.Replace('/', '\\')) ); } } try { name = ModulePath.FromBasePathAndName( "", moduleName, packages.Contains, new GetModuleCallable(packages).GetModule ); } catch (ArgumentException) { return null; } using (var stream = new FileStream(zipFile, FileMode.Open, FileAccess.Read)) using (var zip = new ZipArchive(stream, ZipArchiveMode.Read, true)) using (var sourceStream = zip.GetEntry(name.SourceFile.Replace('\\', '/'))?.Open()) { if (sourceStream == null) { return null; } return AstPythonModule.FromStream( this, sourceStream, PathUtils.GetAbsoluteFilePath(zipFile, name.SourceFile), _factory.GetLanguageVersion() ); } } public IModuleContext CreateModuleContext() { return null; } public void Initialize(PythonAnalyzer state) { if (_state != null) { 
_state.SearchPathsChanged -= PythonAnalyzer_SearchPathsChanged; } _state = state; if (_state != null) { _state.SearchPathsChanged += PythonAnalyzer_SearchPathsChanged; PythonAnalyzer_SearchPathsChanged(_state, EventArgs.Empty); } } private void PythonAnalyzer_SearchPathsChanged(object sender, EventArgs e) { _searchPaths = _state.GetSearchPaths(); _searchPathDb = null; _zipPackageCache = null; ModuleNamesChanged?.Invoke(this, EventArgs.Empty); } public event EventHandler ModuleNamesChanged; #endregion public void Dispose() { _searchPathDb = null; _zipPackageCache = null; _typeDb = null; var factory = _factory; _factory = null; if (factory != null) { factory.NewDatabaseAvailable -= OnNewDatabaseAvailable; } } } }
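// ---------------------------------------------------------------------------
// Simplified sketch (not part of the PTVS sources above) of the lookup order
// that CPythonInterpreter.ImportModule follows: the cached type database is
// consulted first, then the search-path database, then each configured search
// path, where a file is treated as a zip archive and a directory as a package
// tree. The IModuleSource and ResolveModule names are hypothetical.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

interface IModuleSource
{
    object GetModule(string name); // returns null when the module is unknown
}

static class ModuleResolutionSketch
{
    public static object ResolveModule(
        string name,
        IModuleSource typeDb,
        IModuleSource searchPathDb,
        IEnumerable<string> searchPaths,
        Func<string, string, object> loadFromZip,
        Func<string, string, object> loadFromDirectory)
    {
        // 1) Cached databases win if they already know the module.
        object mod = typeDb?.GetModule(name) ?? searchPathDb?.GetModule(name);
        if (mod != null)
            return mod;

        // 2) Otherwise walk the analyzer's search paths in order.
        foreach (string searchPath in searchPaths ?? Enumerable.Empty<string>())
        {
            mod = File.Exists(searchPath) ? loadFromZip(searchPath, name)            // zip archive
                : Directory.Exists(searchPath) ? loadFromDirectory(searchPath, name) // package tree
                : null;
            if (mod != null)
                return mod;
        }
        return null; // unresolved
    }
}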
#region License // /* // See license included in this library folder. // */ #endregion using System; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Runtime.CompilerServices; using Sqloogle.Libs.NLog.Common; using Sqloogle.Libs.NLog.Config; using Sqloogle.Libs.NLog.Internal; namespace Sqloogle.Libs.NLog { /// <summary> /// Creates and manages instances of <see cref="T:Sqloogle.Libs.NLog.Logger" /> objects. /// </summary> public sealed class LogManager { private static readonly LogFactory globalFactory = new LogFactory(); #if !NET_CF && !SILVERLIGHT && !MONO /// <summary> /// Initializes static members of the LogManager class. /// </summary> [SuppressMessage("Microsoft.Performance", "CA1810:InitializeReferenceTypeStaticFieldsInline", Justification = "Significant logic in .cctor()")] static LogManager() { try { SetupTerminationEvents(); } catch (Exception exception) { if (exception.MustBeRethrown()) { throw; } InternalLogger.Warn("Error setting up termiation events: {0}", exception); } } #endif /// <summary> /// Prevents a default instance of the LogManager class from being created. /// </summary> private LogManager() { } /// <summary> /// Occurs when logging <see cref="Configuration" /> changes. /// </summary> public static event EventHandler<LoggingConfigurationChangedEventArgs> ConfigurationChanged { add { globalFactory.ConfigurationChanged += value; } remove { globalFactory.ConfigurationChanged -= value; } } #if !NET_CF && !SILVERLIGHT /// <summary> /// Occurs when logging <see cref="Configuration" /> gets reloaded. /// </summary> public static event EventHandler<LoggingConfigurationReloadedEventArgs> ConfigurationReloaded { add { globalFactory.ConfigurationReloaded += value; } remove { globalFactory.ConfigurationReloaded -= value; } } #endif /// <summary> /// Gets or sets a value indicating whether NLog should throw exceptions. /// By default exceptions are not thrown under any circumstances. /// </summary> public static bool ThrowExceptions { get { return globalFactory.ThrowExceptions; } set { globalFactory.ThrowExceptions = value; } } /// <summary> /// Gets or sets the current logging configuration. /// </summary> public static LoggingConfiguration Configuration { get { return globalFactory.Configuration; } set { globalFactory.Configuration = value; } } /// <summary> /// Gets or sets the global log threshold. Log events below this threshold are not logged. /// </summary> public static LogLevel GlobalThreshold { get { return globalFactory.GlobalThreshold; } set { globalFactory.GlobalThreshold = value; } } #if !NET_CF /// <summary> /// Gets the logger named after the currently-being-initialized class. /// </summary> /// <returns>The logger.</returns> /// <remarks> /// This is a slow-running method. /// Make sure you're not doing this in a loop. /// </remarks> [MethodImpl(MethodImplOptions.NoInlining)] public static Logger GetCurrentClassLogger() { #if SILVERLIGHT StackFrame frame = new StackTrace().GetFrame(1); #else var frame = new StackFrame(1, false); #endif return globalFactory.GetLogger(frame.GetMethod().DeclaringType.FullName); } /// <summary> /// Gets the logger named after the currently-being-initialized class. /// </summary> /// <param name="loggerType"> /// The logger class. The class must inherit from <see cref="Logger" />. /// </param> /// <returns>The logger.</returns> /// <remarks> /// This is a slow-running method. /// Make sure you're not doing this in a loop. 
/// </remarks> [MethodImpl(MethodImplOptions.NoInlining)] public static Logger GetCurrentClassLogger(Type loggerType) { #if SILVERLIGHT StackFrame frame = new StackTrace().GetFrame(1); #else var frame = new StackFrame(1, false); #endif return globalFactory.GetLogger(frame.GetMethod().DeclaringType.FullName, loggerType); } #endif /// <summary> /// Creates a logger that discards all log messages. /// </summary> /// <returns>Null logger which discards all log messages.</returns> public static Logger CreateNullLogger() { return globalFactory.CreateNullLogger(); } /// <summary> /// Gets the specified named logger. /// </summary> /// <param name="name">Name of the logger.</param> /// <returns> /// The logger reference. Multiple calls to <c>GetLogger</c> with the same argument aren't guaranteed to return the same logger reference. /// </returns> public static Logger GetLogger(string name) { return globalFactory.GetLogger(name); } /// <summary> /// Gets the specified named logger. /// </summary> /// <param name="name">Name of the logger.</param> /// <param name="loggerType"> /// The logger class. The class must inherit from <see cref="Logger" />. /// </param> /// <returns> /// The logger reference. Multiple calls to <c>GetLogger</c> with the same argument aren't guaranteed to return the same logger reference. /// </returns> public static Logger GetLogger(string name, Type loggerType) { return globalFactory.GetLogger(name, loggerType); } /// <summary> /// Loops through all loggers previously returned by GetLogger. /// and recalculates their target and filter list. Useful after modifying the configuration programmatically /// to ensure that all loggers have been properly configured. /// </summary> public static void ReconfigExistingLoggers() { globalFactory.ReconfigExistingLoggers(); } #if !SILVERLIGHT /// <summary> /// Flush any pending log messages (in case of asynchronous targets). /// </summary> public static void Flush() { globalFactory.Flush(); } /// <summary> /// Flush any pending log messages (in case of asynchronous targets). /// </summary> /// <param name="timeout">Maximum time to allow for the flush. Any messages after that time will be discarded.</param> public static void Flush(TimeSpan timeout) { globalFactory.Flush(timeout); } /// <summary> /// Flush any pending log messages (in case of asynchronous targets). /// </summary> /// <param name="timeoutMilliseconds">Maximum time to allow for the flush. Any messages after that time will be discarded.</param> public static void Flush(int timeoutMilliseconds) { globalFactory.Flush(timeoutMilliseconds); } #endif /// <summary> /// Flush any pending log messages (in case of asynchronous targets). /// </summary> /// <param name="asyncContinuation">The asynchronous continuation.</param> public static void Flush(AsyncContinuation asyncContinuation) { globalFactory.Flush(asyncContinuation); } /// <summary> /// Flush any pending log messages (in case of asynchronous targets). /// </summary> /// <param name="asyncContinuation">The asynchronous continuation.</param> /// <param name="timeout">Maximum time to allow for the flush. Any messages after that time will be discarded.</param> public static void Flush(AsyncContinuation asyncContinuation, TimeSpan timeout) { globalFactory.Flush(asyncContinuation, timeout); } /// <summary> /// Flush any pending log messages (in case of asynchronous targets). 
/// </summary> /// <param name="asyncContinuation">The asynchronous continuation.</param> /// <param name="timeoutMilliseconds">Maximum time to allow for the flush. Any messages after that time will be discarded.</param> public static void Flush(AsyncContinuation asyncContinuation, int timeoutMilliseconds) { globalFactory.Flush(asyncContinuation, timeoutMilliseconds); } /// <summary> /// Decreases the log enable counter and if it reaches -1 /// the logs are disabled. /// </summary> /// <remarks> /// Logging is enabled if the number of <see cref="EnableLogging" /> calls is greater /// than or equal to <see cref="DisableLogging" /> calls. /// </remarks> /// <returns> /// An object that implements IDisposable whose Dispose() method /// re-enables logging. To be used with the C# <c>using ()</c> statement. /// </returns> public static IDisposable DisableLogging() { return globalFactory.DisableLogging(); } /// <summary>Increases the log enable counter; once it reaches 0, logging is enabled again.</summary> /// <remarks> /// Logging is enabled if the number of <see cref="EnableLogging" /> calls is greater /// than or equal to <see cref="DisableLogging" /> calls. /// </remarks> public static void EnableLogging() { globalFactory.EnableLogging(); } /// <summary> /// Returns <see langword="true" /> if logging is currently enabled. /// </summary> /// <returns> /// A value of <see langword="true" /> if logging is currently enabled, /// <see langword="false" /> otherwise. /// </returns> /// <remarks> /// Logging is enabled if the number of <see cref="EnableLogging" /> calls is greater /// than or equal to <see cref="DisableLogging" /> calls. /// </remarks> public static bool IsLoggingEnabled() { return globalFactory.IsLoggingEnabled(); } #if !NET_CF && !SILVERLIGHT && !MONO private static void SetupTerminationEvents() { AppDomain.CurrentDomain.ProcessExit += TurnOffLogging; AppDomain.CurrentDomain.DomainUnload += TurnOffLogging; } private static void TurnOffLogging(object sender, EventArgs args) { // reset logging configuration to null // this causes old configuration (if any) to be closed. InternalLogger.Info("Shutting down logging..."); Configuration = null; InternalLogger.Info("Logger has been shut down."); } #endif } }
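// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the Sqloogle.Libs.NLog sources above):
// typical calls against the static LogManager facade. It assumes a logging
// configuration has already been assigned to LogManager.Configuration and
// that the vendored Logger type exposes the usual Info(string) overload.
// ---------------------------------------------------------------------------
using System;
using Sqloogle.Libs.NLog;

static class LogManagerUsage
{
    // Resolved once per type; GetCurrentClassLogger walks the call stack,
    // so it should not be called in a tight loop.
    private static readonly Logger Log = LogManager.GetCurrentClassLogger();

    static void Run()
    {
        Log.Info("starting work");

        // Temporarily suspend logging; disposing the returned object
        // re-enables it (the counter semantics documented above).
        using (LogManager.DisableLogging())
        {
            Log.Info("this message is suppressed");
        }

        Log.Info("finished work");
        LogManager.Flush(TimeSpan.FromSeconds(5)); // drain asynchronous targets
    }
}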
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Reflection; using Xunit; namespace System.Linq.Expressions.Tests { public class OpAssign { [Theory] [PerCompilationType(nameof(AssignAndEquivalentMethods))] public void AssignmentEquivalents(MethodInfo nonAssign, MethodInfo assign, Type type, bool useInterpreter) { Func<Expression, Expression, Expression> withoutAssignment = (Func<Expression, Expression, Expression>)nonAssign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); foreach (object x in new[] { 0, -1, 1, 10 }.Select(i => Convert.ChangeType(i, type))) foreach (object y in new[] { -1, 1, 10 }.Select(i => Convert.ChangeType(i, type))) { ConstantExpression xExp = Expression.Constant(x); ConstantExpression yExp = Expression.Constant(y); Expression woAssign = withoutAssignment(xExp, yExp); ParameterExpression variable = Expression.Variable(type); Expression initAssign = Expression.Assign(variable, xExp); Expression assignment = withAssignment(variable, yExp); Expression wAssign = Expression.Block( new ParameterExpression[] { variable }, initAssign, assignment ); Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(woAssign, wAssign)).Compile(useInterpreter)()); LabelTarget target = Expression.Label(type); Expression wAssignReturningVariable = Expression.Block( new ParameterExpression[] { variable }, initAssign, assignment, Expression.Return(target, variable), Expression.Label(target, Expression.Default(type)) ); Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(woAssign, wAssignReturningVariable)).Compile(useInterpreter)()); } } private class Box<T> { public static T StaticValue { get; set; } public T Value { get; set; } public T this[int index] { get { return Value; } set { Value = value; } } public Box(T value) { Value = value; } } [Theory] [PerCompilationType(nameof(AssignAndEquivalentMethods))] public void AssignmentEquivalentsWithMemberAccess(MethodInfo nonAssign, MethodInfo assign, Type type, bool useInterpreter) { Func<Expression, Expression, Expression> withoutAssignment = (Func<Expression, Expression, Expression>)nonAssign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); foreach (object x in new[] { 0, -1, 1, 10 }.Select(i => Convert.ChangeType(i, type))) foreach (object y in new[] { -1, 1, 10 }.Select(i => Convert.ChangeType(i, type))) { ConstantExpression xExp = Expression.Constant(x); ConstantExpression yExp = Expression.Constant(y); Expression woAssign = withoutAssignment(xExp, yExp); Type boxType = typeof(Box<>).MakeGenericType(type); object box = boxType.GetConstructor(new[] { type }).Invoke(new object[] { x }); Expression boxExp = Expression.Constant(box); Expression property = Expression.Property(boxExp, boxType.GetProperty("Value")); Expression assignment = withAssignment(property, yExp); Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(woAssign, assignment)).Compile(useInterpreter)()); LabelTarget target = Expression.Label(type); box = boxType.GetConstructor(new[] { type 
}).Invoke(new object[] { x }); boxExp = Expression.Constant(box); property = Expression.Property(boxExp, boxType.GetProperty("Value")); assignment = withAssignment(property, yExp); Expression wAssignReturningVariable = Expression.Block( assignment, Expression.Return(target, property), Expression.Label(target, Expression.Default(type)) ); Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(woAssign, wAssignReturningVariable)).Compile(useInterpreter)()); } } [Theory, PerCompilationType(nameof(AssignAndEquivalentMethods))] public void AssignmentEquivalentsWithStaticMemberAccess(MethodInfo nonAssign, MethodInfo assign, Type type, bool useInterpreter) { Func<Expression, Expression, Expression> withoutAssignment = (Func<Expression, Expression, Expression>)nonAssign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); foreach (object x in new[] { 0, -1, 1, 10 }.Select(i => Convert.ChangeType(i, type))) foreach (object y in new[] { -1, 1, 10 }.Select(i => Convert.ChangeType(i, type))) { ConstantExpression xExp = Expression.Constant(x); ConstantExpression yExp = Expression.Constant(y); Expression woAssign = withoutAssignment(xExp, yExp); Type boxType = typeof(Box<>).MakeGenericType(type); PropertyInfo prop = boxType.GetProperty("StaticValue"); prop.SetValue(null, x); Expression property = Expression.Property(null, prop); Expression assignment = withAssignment(property, yExp); Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(woAssign, assignment)).Compile(useInterpreter)()); prop.SetValue(null, x); Expression wAssignReturningVariable = Expression.Block( assignment, property ); Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(woAssign, wAssignReturningVariable)).Compile(useInterpreter)()); } } [Theory] [PerCompilationType(nameof(AssignAndEquivalentMethods))] public void AssignmentEquivalentsWithIndexAccess(MethodInfo nonAssign, MethodInfo assign, Type type, bool useInterpreter) { Func<Expression, Expression, Expression> withoutAssignment = (Func<Expression, Expression, Expression>)nonAssign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); foreach (object x in new[] { 0, -1, 1, 10 }.Select(i => Convert.ChangeType(i, type))) foreach (object y in new[] { -1, 1, 10 }.Select(i => Convert.ChangeType(i, type))) { ConstantExpression xExp = Expression.Constant(x); ConstantExpression yExp = Expression.Constant(y); Expression woAssign = withoutAssignment(xExp, yExp); Type boxType = typeof(Box<>).MakeGenericType(type); object box = boxType.GetConstructor(new[] { type }).Invoke(new object[] { x }); Expression boxExp = Expression.Constant(box); Expression property = Expression.Property(boxExp, boxType.GetProperty("Item"), Expression.Constant(0)); Expression assignment = withAssignment(property, yExp); Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(woAssign, assignment)).Compile(useInterpreter)()); LabelTarget target = Expression.Label(type); box = boxType.GetConstructor(new[] { type }).Invoke(new object[] { x }); boxExp = Expression.Constant(box); property = Expression.Property(boxExp, boxType.GetProperty("Item"), Expression.Constant(0)); assignment = withAssignment(property, yExp); Expression 
wAssignReturningVariable = Expression.Block( assignment, Expression.Return(target, property), Expression.Label(target, Expression.Default(type)) ); Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(woAssign, wAssignReturningVariable)).Compile(useInterpreter)()); } } [Theory] [MemberData(nameof(AssignmentMethods))] public void AssignmentReducable(MethodInfo assign, Type type) { Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); ParameterExpression variable = Expression.Variable(type); Expression assignment = withAssignment(variable, Expression.Default(type)); Assert.True(assignment.CanReduce); Assert.NotSame(assignment, assignment.ReduceAndCheck()); } [Theory] [MemberData(nameof(AssignmentMethods))] public void CannotAssignToNonWritable(MethodInfo assign, Type type) { Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); AssertExtensions.Throws<ArgumentException>("left", () => withAssignment(Expression.Default(type), Expression.Default(type))); } [Theory] [MemberData(nameof(AssignmentMethods))] public void AssignmentWithMemberAccessReducable(MethodInfo assign, Type type) { Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); Type boxType = typeof(Box<>).MakeGenericType(type); object box = boxType.GetConstructor(new[] { type }).Invoke(new object[] { Convert.ChangeType(0, type) }); Expression boxExp = Expression.Constant(box); Expression property = Expression.Property(boxExp, boxType.GetProperty("Value")); Expression assignment = withAssignment(property, Expression.Default(type)); Assert.True(assignment.CanReduce); Assert.NotSame(assignment, assignment.ReduceAndCheck()); } [Theory] [MemberData(nameof(AssignmentMethods))] public void AssignmentWithIndexAccessReducable(MethodInfo assign, Type type) { Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); Type boxType = typeof(Box<>).MakeGenericType(type); object box = boxType.GetConstructor(new[] { type }).Invoke(new object[] { Convert.ChangeType(0, type) }); Expression boxExp = Expression.Constant(box); Expression property = Expression.Property(boxExp, boxType.GetProperty("Item"), Expression.Constant(0)); Expression assignment = withAssignment(property, Expression.Default(type)); Assert.True(assignment.CanReduce); Assert.NotSame(assignment, assignment.ReduceAndCheck()); } private static class Unreadable<T> { public static T WriteOnly { set { } } } [Theory] [MemberData(nameof(AssignmentMethods))] public static void ThrowsOnLeftUnreadable(MethodInfo assign, Type type) { Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); Type unreadableType = typeof(Unreadable<>).MakeGenericType(type); Expression property = Expression.Property(null, unreadableType.GetProperty("WriteOnly")); AssertExtensions.Throws<ArgumentException>("left", () => withAssignment(property, Expression.Default(type))); } [Theory] [MemberData(nameof(AssignmentMethods))] public static void ThrowsOnRightUnreadable(MethodInfo assign, Type type) { Func<Expression, Expression, Expression> 
withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); Type unreadableType = typeof(Unreadable<>).MakeGenericType(type); Expression property = Expression.Property(null, unreadableType.GetProperty("WriteOnly")); Expression variable = Expression.Variable(type); AssertExtensions.Throws<ArgumentException>("right", () => withAssignment(variable, property)); } [Theory] [MemberData(nameof(AssignmentMethodsWithoutTypes))] public void ThrowIfNoSuchBinaryOperation(MethodInfo assign) { Func<Expression, Expression, Expression> withAssignment = (Func<Expression, Expression, Expression>)assign.CreateDelegate(typeof(Func<Expression, Expression, Expression>)); ParameterExpression variable = Expression.Variable(typeof(string)); Expression value = Expression.Default(typeof(string)); Assert.Throws<InvalidOperationException>(() => withAssignment(variable, value)); } public static IEnumerable<object[]> AssignmentMethods() { MethodInfo[] expressionMethods = typeof(Expression).GetMethods().Where(mi => mi.GetParameters().Length == 2).ToArray(); foreach (Tuple<string, string> names in AssignAndEquivalentMethodNames(true)) yield return new object[] { expressionMethods.First(mi => mi.Name == names.Item2), typeof(int) }; foreach (Tuple<string, string> names in AssignAndEquivalentMethodNames(false)) yield return new object[] { expressionMethods.First(mi => mi.Name == names.Item2), typeof(double) }; } public static IEnumerable<object[]> AssignmentMethodsWithoutTypes() { MethodInfo[] expressionMethods = typeof(Expression).GetMethods().Where(mi => mi.GetParameters().Length == 2).ToArray(); return AssignAndEquivalentMethodNames(true).Concat(AssignAndEquivalentMethodNames(false)) .Select(i => i.Item2) .Distinct() .Select(i => new object[] { expressionMethods.First(mi => mi.Name == i) }); } public static IEnumerable<object[]> AssignAndEquivalentMethods() { MethodInfo[] expressionMethods = typeof(Expression).GetMethods().Where(mi => mi.GetParameters().Length == 2).ToArray(); foreach (Tuple<string, string> names in AssignAndEquivalentMethodNames(true)) yield return new object[] { expressionMethods.First(mi => mi.Name == names.Item1), expressionMethods.First(mi => mi.Name == names.Item2), typeof(int) }; foreach (Tuple<string, string> names in AssignAndEquivalentMethodNames(false)) yield return new object[] { expressionMethods.First(mi => mi.Name == names.Item1), expressionMethods.First(mi => mi.Name == names.Item2), typeof(double) }; } public static IEnumerable<Tuple<string, string>> AssignAndEquivalentMethodNames(bool integral) { yield return Tuple.Create("Add", "AddAssign"); yield return Tuple.Create("AddChecked", "AddAssignChecked"); yield return Tuple.Create("Divide", "DivideAssign"); yield return Tuple.Create("Modulo", "ModuloAssign"); yield return Tuple.Create("Multiply", "MultiplyAssign"); yield return Tuple.Create("MultiplyChecked", "MultiplyAssignChecked"); yield return Tuple.Create("Subtract", "SubtractAssign"); yield return Tuple.Create("SubtractChecked", "SubtractAssignChecked"); if (integral) { yield return Tuple.Create("And", "AndAssign"); yield return Tuple.Create("ExclusiveOr", "ExclusiveOrAssign"); yield return Tuple.Create("LeftShift", "LeftShiftAssign"); yield return Tuple.Create("Or", "OrAssign"); yield return Tuple.Create("RightShift", "RightShiftAssign"); } else yield return Tuple.Create("Power", "PowerAssign"); } [Theory] [MemberData(nameof(ToStringData))] public static void ToStringTest(ExpressionType kind, string symbol, Type 
type) { BinaryExpression e = Expression.MakeBinary(kind, Expression.Parameter(type, "a"), Expression.Parameter(type, "b")); Assert.Equal($"(a {symbol} b)", e.ToString()); } public static IEnumerable<object[]> ToStringData() { return ToStringDataImpl().Select(t => new object[] { t.Item1, t.Item2, t.Item3 }); } private static IEnumerable<Tuple<ExpressionType, string, Type>> ToStringDataImpl() { yield return Tuple.Create(ExpressionType.AddAssign, "+=", typeof(int)); yield return Tuple.Create(ExpressionType.AddAssignChecked, "+=", typeof(int)); yield return Tuple.Create(ExpressionType.SubtractAssign, "-=", typeof(int)); yield return Tuple.Create(ExpressionType.SubtractAssignChecked, "-=", typeof(int)); yield return Tuple.Create(ExpressionType.MultiplyAssign, "*=", typeof(int)); yield return Tuple.Create(ExpressionType.MultiplyAssignChecked, "*=", typeof(int)); yield return Tuple.Create(ExpressionType.DivideAssign, "/=", typeof(int)); yield return Tuple.Create(ExpressionType.ModuloAssign, "%=", typeof(int)); yield return Tuple.Create(ExpressionType.PowerAssign, "**=", typeof(double)); yield return Tuple.Create(ExpressionType.LeftShiftAssign, "<<=", typeof(int)); yield return Tuple.Create(ExpressionType.RightShiftAssign, ">>=", typeof(int)); yield return Tuple.Create(ExpressionType.AndAssign, "&=", typeof(int)); yield return Tuple.Create(ExpressionType.AndAssign, "&&=", typeof(bool)); yield return Tuple.Create(ExpressionType.OrAssign, "|=", typeof(int)); yield return Tuple.Create(ExpressionType.OrAssign, "||=", typeof(bool)); yield return Tuple.Create(ExpressionType.ExclusiveOrAssign, "^=", typeof(int)); yield return Tuple.Create(ExpressionType.ExclusiveOrAssign, "^=", typeof(bool)); } private static IEnumerable<ExpressionType> AssignExpressionTypes { get { yield return ExpressionType.AddAssign; yield return ExpressionType.SubtractAssign; yield return ExpressionType.MultiplyAssign; yield return ExpressionType.AddAssignChecked; yield return ExpressionType.SubtractAssignChecked; yield return ExpressionType.MultiplyAssignChecked; yield return ExpressionType.DivideAssign; yield return ExpressionType.ModuloAssign; yield return ExpressionType.PowerAssign; yield return ExpressionType.AndAssign; yield return ExpressionType.OrAssign; yield return ExpressionType.RightShiftAssign; yield return ExpressionType.LeftShiftAssign; yield return ExpressionType.ExclusiveOrAssign; } } private static IEnumerable<Func<Expression, Expression, MethodInfo, BinaryExpression>> AssignExpressionMethodInfoUsingFactories { get { yield return Expression.AddAssign; yield return Expression.SubtractAssign; yield return Expression.MultiplyAssign; yield return Expression.AddAssignChecked; yield return Expression.SubtractAssignChecked; yield return Expression.MultiplyAssignChecked; yield return Expression.DivideAssign; yield return Expression.ModuloAssign; yield return Expression.PowerAssign; yield return Expression.AndAssign; yield return Expression.OrAssign; yield return Expression.RightShiftAssign; yield return Expression.LeftShiftAssign; yield return Expression.ExclusiveOrAssign; } } public static IEnumerable<object[]> AssignExpressionTypesArguments => AssignExpressionTypes.Select(t => new object[] {t}); public static IEnumerable<object[]> AssignExpressionMethodInfoUsingFactoriesArguments = AssignExpressionMethodInfoUsingFactories.Select(f => new object[] {f}); private static IEnumerable<LambdaExpression> NonUnaryLambdas { get { yield return Expression.Lambda<Action>(Expression.Empty()); Expression<Func<int, int, int>> exp = (x, 
y) => x + y; yield return exp; } } public static IEnumerable<object[]> AssignExpressionTypesAndNonUnaryLambdas => AssignExpressionTypes.SelectMany(t => NonUnaryLambdas, (t, l) => new object[] {t, l}); private static IEnumerable<LambdaExpression> NonIntegerReturnUnaryIntegerLambdas { get { ParameterExpression param = Expression.Parameter(typeof(int)); yield return Expression.Lambda<Action<int>>(Expression.Empty(), param); Expression<Func<int, long>> convL = x => x; yield return convL; Expression<Func<int, string>> toString = x => x.ToString(); yield return toString; } } public static IEnumerable<object[]> AssignExpressionTypesAndNonIntegerReturnUnaryIntegerLambdas => AssignExpressionTypes.SelectMany(t => NonIntegerReturnUnaryIntegerLambdas, (t, l) => new object[] {t, l}); private static IEnumerable<LambdaExpression> NonIntegerTakingUnaryIntegerReturningLambda { get { Expression<Func<long, int>> fromL = x => (int)x; yield return fromL; Expression<Func<string, int>> fromS = x => x.Length; yield return fromS; } } public static IEnumerable<object[]> AssignExpressionTypesAndNonIntegerTakingUnaryIntegerReturningLambda => AssignExpressionTypes.SelectMany( t => NonIntegerTakingUnaryIntegerReturningLambda, (t, l) => new object[] {t, l}); [Theory, MemberData(nameof(AssignExpressionTypesArguments))] public void CannotHaveConversionOnAssignWithoutMethod(ExpressionType type) { var lhs = Expression.Variable(typeof(int)); var rhs = Expression.Constant(0); Expression<Func<int, int>> identity = x => x; Assert.Throws<InvalidOperationException>(() => Expression.MakeBinary(type, lhs, rhs, false, null, identity)); Assert.Throws<InvalidOperationException>(() => Expression.MakeBinary(type, lhs, rhs, true, null, identity)); } public static int FiftyNinthBear(int x, int y) { // Ensure numbers add up to 40. Then ignore that and return 59. 
if (x + y != 40) throw new ArgumentException(); return 59; } [Theory, PerCompilationType(nameof(AssignExpressionTypesArguments))] public void ConvertAssignment(ExpressionType type, bool useInterpreter) { var lhs = Expression.Parameter(typeof(int)); var rhs = Expression.Constant(25); Expression<Func<int, int>> doubleIt = x => 2 * x; var lambda = Expression.Lambda<Func<int, int>>( Expression.MakeBinary(type, lhs, rhs, false, GetType().GetMethod(nameof(FiftyNinthBear)), doubleIt), lhs ); var func = lambda.Compile(useInterpreter); Assert.Equal(118, func(15)); } [Theory, MemberData(nameof(AssignExpressionTypesAndNonUnaryLambdas))] public void ConversionMustBeUnary(ExpressionType type, LambdaExpression conversion) { var lhs = Expression.Parameter(typeof(int)); var rhs = Expression.Constant(25); MethodInfo meth = GetType().GetMethod(nameof(FiftyNinthBear)); AssertExtensions.Throws<ArgumentException>( "conversion", () => Expression.MakeBinary(type, lhs, rhs, false, meth, conversion)); } [Theory, MemberData(nameof(AssignExpressionTypesAndNonIntegerReturnUnaryIntegerLambdas))] public void ConversionMustConvertToLHSType(ExpressionType type, LambdaExpression conversion) { var lhs = Expression.Parameter(typeof(int)); var rhs = Expression.Constant(25); MethodInfo meth = GetType().GetMethod(nameof(FiftyNinthBear)); Assert.Throws<InvalidOperationException>(() => Expression.MakeBinary(type, lhs, rhs, false, meth, conversion)); } [Theory, MemberData(nameof(AssignExpressionTypesAndNonIntegerTakingUnaryIntegerReturningLambda))] public void ConversionMustConvertFromRHSType(ExpressionType type, LambdaExpression conversion) { var lhs = Expression.Parameter(typeof(int)); var rhs = Expression.Constant(25); MethodInfo meth = GetType().GetMethod(nameof(FiftyNinthBear)); Assert.Throws<InvalidOperationException>(() => Expression.MakeBinary(type, lhs, rhs, false, meth, conversion)); } private class AddsToSomethingElse : IEquatable<AddsToSomethingElse> { public int Value { get; } public AddsToSomethingElse(int value) { Value = value; } public static int operator +(AddsToSomethingElse x, AddsToSomethingElse y) => x.Value + y.Value; public bool Equals(AddsToSomethingElse other) => Value == other?.Value; public override bool Equals(object obj) => Equals(obj as AddsToSomethingElse); public override int GetHashCode() => Value; } private static string StringAddition(int x, int y) => (x + y).ToString(); [Fact] public void CannotAssignOpIfOpReturnNotAssignable() { var lhs = Expression.Parameter(typeof(AddsToSomethingElse)); var rhs = Expression.Constant(new AddsToSomethingElse(3)); AssertExtensions.Throws<ArgumentException>(null, () => Expression.AddAssign(lhs, rhs)); } [Theory, ClassData(typeof(CompilationTypes))] public void CanAssignOpIfOpReturnNotAssignableButConversionFixes(bool useInterpreter) { var lhs = Expression.Parameter(typeof(AddsToSomethingElse)); var rhs = Expression.Constant(new AddsToSomethingElse(3)); Expression<Func<int, AddsToSomethingElse>> conversion = x => new AddsToSomethingElse(x); var exp = Expression.Lambda<Func<AddsToSomethingElse, AddsToSomethingElse>>( Expression.AddAssign(lhs, rhs, null, conversion), lhs ); var func = exp.Compile(useInterpreter); Assert.Equal(new AddsToSomethingElse(10), func(new AddsToSomethingElse(7))); } [Theory, PerCompilationType(nameof(AssignExpressionTypesArguments))] public void ConvertOpAssignToMember(ExpressionType type, bool useInterpreter) { Box<int> box = new Box<int>(25); Expression<Func<int, int>> doubleIt = x => x * 2; var exp = Expression.Lambda<Func<int>>( 
Expression.MakeBinary( type, Expression.Property(Expression.Constant(box), "Value"), Expression.Constant(15), false, GetType().GetMethod(nameof(FiftyNinthBear)), doubleIt ) ); var act = exp.Compile(useInterpreter); Assert.Equal(118, act()); Assert.Equal(118, box.Value); } [Theory, PerCompilationType(nameof(AssignExpressionTypesArguments))] public void ConvertOpAssignToArrayIndex(ExpressionType type, bool useInterpreter) { int[] array = {0, 0, 25, 0}; Expression<Func<int, int>> doubleIt = x => x * 2; var exp = Expression.Lambda<Func<int>>( Expression.MakeBinary( type, Expression.ArrayAccess(Expression.Constant(array), Expression.Constant(2)), Expression.Constant(15), false, GetType().GetMethod(nameof(FiftyNinthBear)), doubleIt ) ); var act = exp.Compile(useInterpreter); Assert.Equal(118, act()); Assert.Equal(118, array[2]); } private delegate int ByRefInts(ref int x, int y); private delegate int BothByRefInts(ref int x, ref int y); [Theory, PerCompilationType(nameof(AssignExpressionMethodInfoUsingFactoriesArguments))] public void MethodNoConvertOpWriteByRefParameter(Func<Expression, Expression, MethodInfo, BinaryExpression> factory, bool useInterpreter) { var pX = Expression.Parameter(typeof(int).MakeByRefType()); var pY = Expression.Parameter(typeof(int)); var exp = Expression.Lambda<ByRefInts>(factory(pX, pY, GetType().GetMethod(nameof(FiftyNinthBear))), pX, pY); var del = exp.Compile(useInterpreter); int arg = 5; Assert.Equal(59, del(ref arg, 35)); Assert.Equal(59, arg); } private delegate AddsToSomethingElse ByRefSomeElse(ref AddsToSomethingElse x, AddsToSomethingElse y); [Theory, ClassData(typeof(CompilationTypes))] public void ConvertOpWriteByRefParameterOverloadedOperator(bool useInterpreter) { var pX = Expression.Parameter(typeof(AddsToSomethingElse).MakeByRefType()); var pY = Expression.Parameter(typeof(AddsToSomethingElse)); Expression<Func<int, AddsToSomethingElse>> conv = x => new AddsToSomethingElse(x); var exp = Expression.Lambda<ByRefSomeElse>(Expression.AddAssign(pX, pY, null, conv), pX, pY); var del = exp.Compile(useInterpreter); AddsToSomethingElse arg = new AddsToSomethingElse(5); AddsToSomethingElse result = del(ref arg, new AddsToSomethingElse(35)); Assert.Equal(result, arg); } } }
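// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the test class above): the equivalence that
// AssignmentEquivalents exercises, written out for a single case. A compound
// assignment built with Expression.AddAssign yields the same value as the
// corresponding Expression.Add applied to the same operands.
// ---------------------------------------------------------------------------
using System;
using System.Linq.Expressions;

static class OpAssignEquivalenceSketch
{
    static void Main()
    {
        ConstantExpression xExp = Expression.Constant(10);
        ConstantExpression yExp = Expression.Constant(32);

        // Plain addition: 10 + 32
        Expression withoutAssignment = Expression.Add(xExp, yExp);

        // Compound assignment: int v = 10; v += 32; the block evaluates to v.
        ParameterExpression variable = Expression.Variable(typeof(int), "v");
        Expression withAssignment = Expression.Block(
            new[] { variable },
            Expression.Assign(variable, xExp),
            Expression.AddAssign(variable, yExp));

        int plain = Expression.Lambda<Func<int>>(withoutAssignment).Compile()();
        int assigned = Expression.Lambda<Func<int>>(withAssignment).Compile()();
        Console.WriteLine(plain == assigned); // True: both evaluate to 42
    }
}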
using System; using System.Collections.Generic; using System.IO; using System.Runtime.InteropServices.ComTypes; using System.Runtime.InteropServices; using System.Diagnostics; using System.Globalization; using System.Linq; using MCEBuddy.BaseClasses; using MCEBuddy.BaseClasses.DSHelper; using MCEBuddy.BaseClasses.DirectShow; using MCEBuddy.Globals; using MCEBuddy.Util; using MCEBuddy.Configuration; namespace MCEBuddy.RemuxMediaCenter { public class ExtractWithGraph : IDisposable { #region Definitions [Flags] public enum ExtractMediaType { None = 0x0, Audio = 0x1, Video = 0x2, Subtitle = 0x4 } [DllImport("kernel32.dll", CharSet = CharSet.Auto)] internal static extern IntPtr LoadLibrary(string lpFileName); [DllImport("kernel32.dll", CharSet = CharSet.Ansi, ExactSpelling = true, SetLastError = true)] internal static extern IntPtr GetProcAddress(IntPtr hModule, string lpProcName); [UnmanagedFunctionPointer(CallingConvention.StdCall)] public delegate uint DllGetClassObject( [MarshalAs(UnmanagedType.LPStruct)] Guid rclsid, [MarshalAs(UnmanagedType.LPStruct)] Guid riid, [MarshalAs(UnmanagedType.IUnknown, IidParameterIndex=1)] out object ppv ); [ComImport, ComVisible(false), InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("00000001-0000-0000-C000-000000000046")] public interface IClassFactory { void CreateInstance( [MarshalAs(UnmanagedType.IUnknown)] object pUnkOuter, [MarshalAs(UnmanagedType.LPStruct)] Guid riid, [MarshalAs(UnmanagedType.IUnknown, IidParameterIndex = 1)] out object ppvObject); } #endregion private const double INFINITE_LOOP_CHECK_THRESHOLD = 1.5; // Max size of extracted stream private Guid CLSID_DVRMSDecryptTag = new Guid("{C4C4C4F2-0049-4E2B-98FB-9537F6CE516D}"); private Guid CLSID_WTVDecryptTag = new Guid("{09144FD6-BB29-11DB-96F1-005056C00008}"); private Guid CLSID_DumpFilter = new Guid("{60DF815A-784A-4725-8493-C42B166F9D92}"); //private Guid CLSID_SubtitleDecoder = new Guid("{212690FB-83E5-4526-8FD7-74478B7939CD}"); //private Guid CLSID_StreamBufferSourceFilter = new Guid("{C9F5FE02-F851-4EB5-99EE-AD602AF1E619}"); //private Guid CLSID_TivoSourceFilter = new Guid("{A65FA79B-2D2C-42BD-BAB2-D474B8F01248}"); //private Guid CLSID_MainConceptDeMultiplexer = new Guid("{136DCBF5-3874-4B70-AE3E-15997D6334F7}"); private string _SourceFile = ""; private JobStatus _jobStatus; private Log _jobLog; private ExtractMediaType _extractMediaType = ExtractMediaType.None; private string _workPath = ""; private Guid _CLSI_Decryptor; private string _Ext = ""; private string _VideoPart = ""; private List<string> _AudioParts = new List<string>(); private List<string> _SubtitleParts = new List<string>(); private bool _SuccessfulExtraction = false; private FilterGraph _fg; private IGraphBuilder _gb; IBaseFilter _SourceF; private int _gbFiltersCount = 0; public bool SuccessfulExtraction { get { return _SuccessfulExtraction; } } public string VideoPart { get { return _VideoPart; } } public List<string> AudioParts { get { return _AudioParts; } } public List<string> SubtitleParts { get { return _SubtitleParts; } } public ExtractWithGraph(string SourceFile, string workPath, ExtractMediaType mediaType, JobStatus jobStatus, Log jobLog) { _jobStatus = jobStatus; _extractMediaType = mediaType; _SourceFile = SourceFile; _workPath = workPath; _jobLog = jobLog; _Ext = FilePaths.CleanExt(SourceFile).Replace(".", ""); //Set the decryptor type depending on the file type DVR-MS or WTV or TIVO if (_Ext == "dvr-ms") _CLSI_Decryptor = CLSID_DVRMSDecryptTag; else if (_Ext == "wtv") _CLSI_Decryptor = 
CLSID_WTVDecryptTag; // Set up base graph _fg = new FilterGraph(); _gb = (IGraphBuilder)_fg; } private void checkHR(int hr) { if (hr < 0) { HRESULT.ThrowExceptionForHR(hr); } } private bool FAILED(int hr) { if (hr < 0) return true; else return false; } private bool SUCCEEDED(int hr) { if (hr < 0) return false; else return true; } private string GetFullPathWithoutExtension(string path) { return Path.Combine(Path.GetDirectoryName(path), Path.GetFileNameWithoutExtension(path)); } /// <summary> /// Find all the immediate upstream or downstream filters given a filter reference /// </summary> /// <param name="pFilter">Starting filter</param> /// <param name="Dir">Direction to search (upstream or downstream)</param> /// <param name="FilterList">Collect the results in this filter list</param> /// <returns>True if successful in getting filter chain</returns> private bool GetFilterChain(IBaseFilter pFilter, PinDirection Dir, List<IBaseFilter> FilterList) { int hr; IntPtr fetched = IntPtr.Zero; if (pFilter == null || FilterList == null) return false; IEnumPins pEnum; IPin[] pPin = new IPin[1]; hr = pFilter.EnumPins(out pEnum); if (FAILED(hr)) return false; while (pEnum.Next(pPin.Length, pPin, fetched) == 0) { // See if this pin matches the specified direction. PinDirection ThisPinDir; hr = pPin[0].QueryDirection(out ThisPinDir); if (FAILED(hr)) { // Something strange happened. return false; } if (ThisPinDir == Dir) { // Check if the pin is connected to another pin. IPin pPinNext; IntPtr ptr; hr = pPin[0].ConnectedTo(out ptr); if (SUCCEEDED(hr)) { // Get the filter that owns that pin. PinInfo PinInfo; pPinNext = (IPin)Marshal.GetObjectForIUnknown(ptr); hr = pPinNext.QueryPinInfo(out PinInfo); if (FAILED(hr) || (PinInfo.filter == null)) { // Something strange happened. return false; } // Insert the filter into the list. 
AddFilterUnique(FilterList, PinInfo.filter); // Go recursive through the filter chain GetFilterChain(PinInfo.filter, Dir, FilterList); } } } return true; } void AddFilterUnique(List<IBaseFilter> FilterList, IBaseFilter pNew) { if (pNew == null || FilterList == null) return; if (!FilterList.Contains(pNew)) FilterList.Add(pNew); return; } /* // TODO: We need to update this * WTV Files Pin Mapping (pin name between ||) * Audio -> Source Pin |DVR Out - 1| -> PBDA DT Filter |In(Enc/Tag)| |Out| -> Dump |Input| * Video -> Source Pin |DVR Out - 2| -> PBDA DT Filter |In(Enc/Tag)| |Out| -> Dump |Input| * Subtitle -> Source Pin |DVR Out - 5| -> PBDA DT Filter |In(Enc/Tag)| |Out| -> Dump |Input| * * DVRMS Files Pin Mapping (pin name between ||) * Audio -> Source Pin |DVR Out - 1| -> Decrypt/Tag Filter |In(Enc/Tag)| |Out| -> Dump |Input| * Video -> Source Pin |DVR Out - 3| -> Decrypt/Tag Filter |In(Enc/Tag)| |Out| -> Dump |Input| * Subtitle -> Source Pin |DVR Out - 2| -> Decrypt/Tag Filter |In(Enc/Tag)| |Out| -> Dump |Input| */ private void ConnectDecryptedDump(string sourceOutPinName, string DumpFileName) { int hr; Type comtype; IBaseFilter DecryptF; IPin PinOut, PinIn; //Create the decrypt filter if (_CLSI_Decryptor != MediaType.Null) { _jobLog.WriteEntry(this, "Connecting Decryption filter", Log.LogEntryType.Debug); comtype = Type.GetTypeFromCLSID(_CLSI_Decryptor); DecryptF = (IBaseFilter)Activator.CreateInstance(comtype); hr = _gb.AddFilter((IBaseFilter)DecryptF, "Decrypt" + _gbFiltersCount++.ToString(CultureInfo.InvariantCulture)); checkHR(hr); DecryptF.FindPin("In(Enc/Tag)", out PinIn); // Get the decrypt filter pinIn |In(Enc/Tag)| _SourceF.FindPin(sourceOutPinName, out PinOut); // Get the Source filter pinOut (name taken from sourceOutPinName) try { // Try to connect the decrypt filter if it is needed hr = _gb.ConnectDirect(PinOut, PinIn, null); // Connect the source filter pinOut to the decrypt filter pinIn checkHR(hr); DecryptF.FindPin("Out", out PinOut); // Get the Decrypt filter pinOut |Out| (for the next filter to connect to) } catch { // Otherwise go direct _SourceF.FindPin(sourceOutPinName, out PinOut); // Otherwise, go direct and get the source filter pinOut (name taken from sourceOutPinName) for the next filter to connect to } } else _SourceF.FindPin(sourceOutPinName, out PinOut); // Otherwise, go direct and get the source filter pinOut (name taken from sourceOutPinName) for the next filter to connect to // Check if we need a Video Subtitle decoder (Line 21) (here use the Microsoft DTV decoder) - the subtitles are embedded in the Video stream /*if (UseVideoSubtitleDecoder) { IBaseFilter SubtitleF; // TODO: We need to add TEE splitter here and a new DUMP filter here and connect the tee output to the DTV decoder and then Line21 to Dump otherwise we end up with either video or Line21, we want both _jobLog.WriteEntry(this, "Connecting Video Subtitle Extraction filter", Log.LogEntryType.Debug); comtype = Type.GetTypeFromCLSID(CLSID_SubtitleDecoder); SubtitleF = (IBaseFilter)Activator.CreateInstance(comtype); hr = _gb.AddFilter((IBaseFilter)SubtitleF, "Subtitle" + _gbFilters.Count.ToString(CultureInfo.InvariantCulture)); checkHR(hr); _gbFilters.Add(SubtitleF); // Keep track of filters to be released afterwards // Get the subtitle filter pinIn |Video Input| SubtitleF.FindPin("Video Input", out PinIn); // Try to connect the subtitle filter pinIn to the previous filter pinOut hr = _gb.ConnectDirect(PinOut, PinIn, null); checkHR(hr); SubtitleF.FindPin("~Line21 Output", out PinOut); // Get the new 
pinOut |~Line21 Output| from the subtitle filter for the next filter to connect to }*/

            // Create the dump filter
            DumpFilter df = new DumpFilter();

            // Add the filter to the graph
            hr = _gb.AddFilter(df, "Dump" + _gbFiltersCount++.ToString(CultureInfo.InvariantCulture));
            checkHR(hr);

            // Set destination filename
            hr = df.SetFileName(DumpFileName, null);
            checkHR(hr);

            // Connect the dump filter pinIn |Input| to the previous filter pinOut
            _jobLog.WriteEntry(this, "Connecting MCEBuddy DumpStreams filter pins", Log.LogEntryType.Debug);
            hr = df.FindPin("Input", out PinIn);
            checkHR(hr);
            hr = _gb.ConnectDirect(PinOut, PinIn, null);
            checkHR(hr);

            _jobLog.WriteEntry(this, "All filters successfully connected", Log.LogEntryType.Debug);
        }

        /*
         * TIVO Files Pin Mapping (pin name between ||) (NOTE: XXXX changes from machine to machine and AC3 changes if the audio codec changes)
         * Audio -> Source Pin |Output| -> MainConcept MPEG DeMultiplexer |Input| |AC3 (PID XXXX @ Prog# 1)| -> Dump |Input|
         * Video -> Source Pin |Output| -> MainConcept MPEG DeMultiplexer |Input| |Video (PID XXXX @ Prog# 1)| -> Dump |Input|
         */
        public void BuildGraph()
        {
            int hr;
            IntPtr fetched = IntPtr.Zero;
            IntPtr fetched2 = IntPtr.Zero;
            IEnumPins FilterPins;
            IPin[] pins = new IPin[1];
            string PinID;

            // TiVO Directshow filters are only accessible through userspace, otherwise decryption fails, so if we are running the engine as a service (instead of command line) we should prompt the user
            if ((_Ext == "tivo") && GlobalDefs.IsEngineRunningAsService)
                _jobLog.WriteEntry(this, "You need to start MCEBuddy engine as a Command line program. TiVO Desktop Directshow decryption filters do not work with a Windows Service.", Log.LogEntryType.Error);

            // Create the source filter for dvrms or wtv or TIVO (will automatically connect to TIVODecryptorTag in source itself)
            _jobLog.WriteEntry(this, "Loading file using DirectShow source filter", Log.LogEntryType.Debug);
            hr = _gb.AddSourceFilter(_SourceFile, "Source Filter", out _SourceF);
            checkHR(hr);

            // If this is a TIVO file, while the source filter automatically decrypts the inputs we need to connect the MPEG demultiplexer to get the audio and video output pins
            if (_Ext == "tivo")
            {
                IPin PinOut, PinIn;
                IntPtr ptr;
                PinInfo demuxPinInfo;
                List<IBaseFilter> filterList = new List<IBaseFilter>();

                // Check if the source filter is a TiVO source filter (otherwise sometimes it tries to use the normal source filter, which will fail since the stream is encrypted)
                string vendorInfo;
                FilterInfo filterInfo;
                _SourceF.QueryFilterInfo(out filterInfo);
                _SourceF.QueryVendorInfo(out vendorInfo);
                _jobLog.WriteEntry(this, "TiVO Source filter loaded by Directshow -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
                if (vendorInfo == null || !vendorInfo.ToLower().Contains("tivo"))
                {
                    string exception = "";

                    // Check if you are running 64bit MCEBuddy; TiVO needs 32bit MCEBuddy since the TiVO DirectShow DLLs are 32bit and can only be loaded by 32bit processes
                    if (IntPtr.Size == 8)
                        exception += "You need to run 32bit MCEBuddy, TiVO Directshow filters cannot be accessed by a 64bit program.";
                    else
                        exception += "TiVO Desktop installation not detected by Windows DirectShow.";

                    throw new Exception(exception); // Get out of here and let the parent know something is wrong
                }

                hr = _SourceF.FindPin("Output", out PinOut); // Get the Source filter pinOut |Output|
                checkHR(hr);

                // When TIVO desktop is installed, Render automatically builds the filter graph with the necessary demuxing filters - we cannot manually add the MainConcept demux filter
since the class isn't registered but somehow Render is able to find it and load it (along with other redundant filters like DTV, audio etc which we need to remove)
                _jobLog.WriteEntry(this, "DirectShow building TiVO filter chain", Log.LogEntryType.Debug);
                hr = _gb.Render(PinOut);
                checkHR(hr);

                hr = PinOut.ConnectedTo(out ptr); // Find out which input Pin (MainConcept Demux filter) the output of the Source Filter is connected to
                checkHR(hr);
                PinIn = (IPin)Marshal.GetObjectForIUnknown(ptr);
                hr = PinIn.QueryPinInfo(out demuxPinInfo); // Get the MainConcept demux filter from the pin
                checkHR(hr);

                demuxPinInfo.filter.QueryFilterInfo(out filterInfo);
                demuxPinInfo.filter.QueryVendorInfo(out vendorInfo);
                _jobLog.WriteEntry(this, "Checking downstream TiVO filter chain starting with TiVO Demux filter -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
                if (!GetFilterChain(demuxPinInfo.filter, PinDirection.Output, filterList)) // Get the list of all downstream (redundant) filters (like DTV, Audio, video render etc) that the automatic Render call above added after the demux filter (if there are no downstream filters, TIVO desktop is not installed)
                    throw new Exception("Unable to get TIVO filter chain");

                // Now remove all the filters in the chain downstream of the demux filter from the graph builder (we don't need them, we will add our own filters later)
                _jobLog.WriteEntry(this, "Removing redundant filters from TiVO filter chain", Log.LogEntryType.Debug);
                foreach (IBaseFilter filter in filterList)
                {
                    filter.QueryFilterInfo(out filterInfo);
                    filter.QueryVendorInfo(out vendorInfo);
                    _jobLog.WriteEntry(this, "Removing filter -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
                    _gb.RemoveFilter(filter);
                    Marshal.FinalReleaseComObject(filter); // Release the COM object
                }

                // Now the TIVO MainConcept Demux Filter is our new "Source" filter
                _SourceF = demuxPinInfo.filter;
            }

            // TODO: We need to find a way to insert a filter which can allow us to select audio streams (e.g.
LAV filter, currently it only allows us access to the default audio stream and not multiple audio streams) // Cycle through pins, connecting as appropriate hr = _SourceF.EnumPins(out FilterPins); checkHR(hr); while (FilterPins.Next(pins.Length, pins, fetched) == 0) { IntPtr ptypes = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(IntPtr))); AMMediaType mtypes; IEnumMediaTypes enummtypes; IntPtr ptrEnum; pins[0].EnumMediaTypes(out ptrEnum); enummtypes = (IEnumMediaTypes)Marshal.GetObjectForIUnknown(ptrEnum); while (enummtypes.Next(1, ptypes, fetched2) == 0) { /* Extract Audio, Video or Subtitle streams -> References: * http://nate.deepcreek.org.au/svn/DigitalWatch/trunk/bin/MediaTypes.txt * http://msdn.microsoft.com/en-us/library/ms932033.aspx * https://sourceforge.net/p/tsubget/home/Dumping%20a%20Stream/ * http://msdn.microsoft.com/en-us/library/windows/desktop/dd695343(v=vs.85).aspx * http://msdn.microsoft.com/en-us/library/windows/desktop/dd390660(v=vs.85).aspx * http://msdn.microsoft.com/en-us/library/windows/desktop/dd407354(v=vs.85).aspx * http://whrl.pl/RcRv5p (extracting Teletext from WTV/DVRMS) */ IntPtr ptrStructure = Marshal.ReadIntPtr(ptypes); mtypes = (AMMediaType)Marshal.PtrToStructure(ptrStructure, typeof(AMMediaType)); if ((mtypes.majorType == MediaType.Video) || (mtypes.majorType == MediaType.Audio) || //(mtypes.majorType == MediaType.Mpeg2PES) || (mtypes.majorType == MediaType.Stream) || (mtypes.majorType == MediaType.AuxLine21Data) || (mtypes.majorType == MediaType.VBI) || (mtypes.majorType == MediaType.MSTVCaption) || (mtypes.majorType == MediaType.DTVCCData) || (mtypes.majorType == MediaType.Mpeg2Sections && mtypes.subType == MediaSubType.None && mtypes.formatType == FormatType.None)) { string DumpFileName = ""; if ((mtypes.majorType == MediaType.Video) && ((_extractMediaType & ExtractMediaType.Video) != 0)) // Video { DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_VIDEO"); _VideoPart = DumpFileName; _jobLog.WriteEntry(this, "Found Video stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug); } //else if (((mtypes.majorType == MediaType.Audio) || // Audio types https://msdn.microsoft.com/en-us/library/windows/desktop/dd390676(v=vs.85).aspx // ((mtypes.majorType == MediaType.Mpeg2PES) && ((mtypes.subType == MediaSubType.DolbyAC3) || (mtypes.subType == MediaSubType.DTS) || (mtypes.subType == MediaSubType.DvdLPCMAudio) || (mtypes.subType == MediaSubType.Mpeg2Audio))) || // ((mtypes.majorType == MediaType.Stream) && ((mtypes.subType == MediaSubType.DolbyAC3) || (mtypes.subType == MediaSubType.MPEG1Audio) || (mtypes.subType == MediaSubType.Mpeg2Audio) || (mtypes.subType == MediaSubType.DolbyDDPlus) || (mtypes.subType == MediaSubType.MpegADTS_AAC) || (mtypes.subType == MediaSubType.MpegLOAS))) // ) && // ((_extractMediaType & ExtractMediaType.Audio) != 0)) //{ // DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_AUDIO" + AudioParts.Count.ToString()); // _AudioParts.Add(DumpFileName); // _jobLog.WriteEntry(this, "Found Audio stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug); //} else if ((_extractMediaType & ExtractMediaType.Subtitle) != 0)// Subtitles { DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_SUBTITLE" + SubtitleParts.Count.ToString()); SubtitleParts.Add(DumpFileName); _jobLog.WriteEntry(this, "Found Subtitle stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug); } if (!String.IsNullOrWhiteSpace(DumpFileName)) // If we 
are asked to extract something { hr = pins[0].QueryId(out PinID); ConnectDecryptedDump(PinID, DumpFileName); } } else { // Debug - looking for more subtitle types (very poorly documented by Microsoft) Guid type = mtypes.majorType; Guid subtype = mtypes.subType; Guid formattyype = mtypes.formatType; } } Marshal.FreeCoTaskMem(ptypes); // Free up the memory } } public void RunGraph() { DateTime lastTick = DateTime.Now; int hangPeriod = MCEBuddyConf.GlobalMCEConfig.GeneralOptions.hangTimeout; long lastVideoSize = 0, lastAudioSize = 0, lastSubtitleSize = 0; long totalPartsSize = 0, sourceSize = 0; bool AbortError = false; int hr = 0; IMediaControl mediaControl = (IMediaControl)_fg; IMediaEvent mediaEvent = (IMediaEvent)_fg; hr = mediaControl.Run(); checkHR(hr); // Change the priority temporarily (need to reset it back after Dumping Streams) ProcessPriorityClass lastPriority = GlobalDefs.Priority; // Set it up Process.GetCurrentProcess().PriorityClass = GlobalDefs.Priority; // Set the CPU Priority IOPriority.SetPriority(GlobalDefs.IOPriority); // First set the CPU priority // Get filesize of source file sourceSize = Util.FileIO.FileSize(_SourceFile); // Sanity checking if (sourceSize <= 0) { _jobLog.WriteEntry(this, "Unable to get source file size, disabling infinite loop checking.", Log.LogEntryType.Warning); hangPeriod = 0; } bool stop = false, isSuspended = false; while (!stop) { System.Threading.Thread.Sleep(100); if (_jobStatus.Cancelled) { // Received a shutdown command external to the filter extraction _jobLog.WriteEntry(this, "Stream extraction cancelled, aborting graph.", Log.LogEntryType.Warning); stop = true; AbortError = true; mediaControl.Stop(); break; } if (isSuspended) lastTick = DateTime.Now; // Since during suspension there will be no output it shouldn't terminate the process if (!isSuspended && GlobalDefs.Pause) // Check if process has to be suspended (if not already) { _jobLog.WriteEntry(this, "Stream extraction paused", Log.LogEntryType.Information); mediaControl.Pause(); isSuspended = true; } if (isSuspended && !GlobalDefs.Pause) // Check if we need to resume the process { _jobLog.WriteEntry(this, "Stream extraction resumed", Log.LogEntryType.Information); isSuspended = false; mediaControl.Run(); } if (lastPriority != GlobalDefs.Priority) // Check if the priority was changed and if so update it { _jobLog.WriteEntry(this, "Stream extraction priority changed", Log.LogEntryType.Information); lastPriority = GlobalDefs.Priority; Process.GetCurrentProcess().PriorityClass = GlobalDefs.Priority; // Set the CPU Priority IOPriority.SetPriority(GlobalDefs.IOPriority); // First set the CPU priority } EventCode ev; IntPtr p1, p2; if (mediaEvent.GetEvent(out ev, out p1, out p2, 0) == 0) { if (ev == EventCode.Complete) { mediaControl.Stop(); stop = true; } else if (ev == EventCode.ErrorAbort || ev == EventCode.UserAbort || ev == EventCode.StErrStopped || ev == EventCode.ErrorAbortEx) { mediaControl.Stop(); stop = true; //AbortError = true; - some partial/corrupted files are errored out, we'll handle extraction errors later } mediaEvent.FreeEventParams(ev, p1, p2); } // Sanity checking to prevent infinite loop for extraction (sometimes steams extracts infinitely) // Check if the filesize exceed the initial file size and if so abort the operation if (sourceSize > 0) { totalPartsSize = 0; // Video file check if ((_extractMediaType & ExtractMediaType.Video) != 0) { long videoSize = Util.FileIO.FileSize(_VideoPart); if (videoSize < 0) _jobLog.WriteEntry(this, "Unable to get extracted video 
stream file size for infinite loop detection.", Log.LogEntryType.Warning); else if (videoSize > (sourceSize * INFINITE_LOOP_CHECK_THRESHOLD)) { _jobLog.WriteEntry(this, "Extracted video stream is greater than " + INFINITE_LOOP_CHECK_THRESHOLD.ToString(CultureInfo.InvariantCulture) + " times the source file size " + (sourceSize/ 1024).ToString("N", CultureInfo.InvariantCulture) + " [KB].\r\nExtraction likely hung, terminating streams extraction.", Log.LogEntryType.Error); stop = true; AbortError = true; mediaControl.Stop(); break; } if (hangPeriod > 0) if (videoSize > lastVideoSize) // If we have progress lastTick = DateTime.Now; totalPartsSize += videoSize; lastVideoSize = videoSize; } // Audio file check if ((_extractMediaType & ExtractMediaType.Audio) != 0) { foreach (string audioPart in _AudioParts) { long audioSize = Util.FileIO.FileSize(audioPart); if (audioSize < 0) _jobLog.WriteEntry(this, "Unable to get extracted audio stream file size for infinite loop detection.", Log.LogEntryType.Warning); else if (audioSize > (sourceSize * INFINITE_LOOP_CHECK_THRESHOLD)) { _jobLog.WriteEntry(this, "Extracted audio stream is greater than " + INFINITE_LOOP_CHECK_THRESHOLD.ToString(CultureInfo.InvariantCulture) + " times the source file size " + (sourceSize / 1024).ToString("N", CultureInfo.InvariantCulture) + " [KB].\r\nExtraction likely hung, terminating streams extraction.", Log.LogEntryType.Error); stop = true; AbortError = true; mediaControl.Stop(); break; } if (hangPeriod > 0) if (audioSize > lastAudioSize) // If we have progress lastTick = DateTime.Now; totalPartsSize += audioSize; lastAudioSize = audioSize; } } // Subtitle file check if ((_extractMediaType & ExtractMediaType.Subtitle) != 0) { foreach (string subtitlePart in _SubtitleParts) { long subtitleSize = Util.FileIO.FileSize(subtitlePart); if (subtitleSize < 0) _jobLog.WriteEntry(this, "Unable to get extracted subtitle stream file size for infinite loop detection.", Log.LogEntryType.Warning); else if (subtitleSize > (sourceSize * INFINITE_LOOP_CHECK_THRESHOLD)) { _jobLog.WriteEntry(this, "Extracted subtitle stream is greater than " + INFINITE_LOOP_CHECK_THRESHOLD.ToString(CultureInfo.InvariantCulture) + " times the source file size " + (sourceSize / 1024).ToString("N", CultureInfo.InvariantCulture) + " [KB].\r\nExtraction likely hung, terminating streams extraction.", Log.LogEntryType.Error); stop = true; AbortError = true; mediaControl.Stop(); break; } if (hangPeriod > 0) if (subtitleSize > lastSubtitleSize) // If we have progress lastTick = DateTime.Now; totalPartsSize += subtitleSize; lastSubtitleSize = subtitleSize; } } if (totalPartsSize < 0) totalPartsSize = 0; // Incase we get -ve numbers _jobStatus.PercentageComplete = (((float)totalPartsSize / (float)sourceSize) > 1 ? 
100 : ((float)totalPartsSize / (float)sourceSize) * 100); // Calculate % complete from size estimation (since no recoding is happening) and cap at 100%
                        _jobLog.WriteEntry(this, "Percentage complete : " + _jobStatus.PercentageComplete.ToString("0.00", CultureInfo.InvariantCulture) + " %", Log.LogEntryType.Debug); // Write to file
                    }

                    // Check if we have reached the end of the file or run out of disk space; sometimes Windows just loops endlessly without any incremental output
                    // TODO: Should we treat this as an error or normal processing
                    if ((hangPeriod > 0) && (DateTime.Now > lastTick.AddSeconds(hangPeriod)))
                    {
                        _jobLog.WriteEntry("No response from stream extraction for " + hangPeriod + " seconds, process likely finished, continuing.", Log.LogEntryType.Warning); // Don't treat as an error for now
                        stop = true;
                        // AbortError = true; // Don't treat as an error for now
                        mediaControl.Stop();
                        break;
                    }
                }

                // Reset Priority to Normal
                IOPriority.SetPriority(GlobalDefs.EngineIOPriority); // Set CPU priority after restoring the scheduling priority
                Process.GetCurrentProcess().PriorityClass = GlobalDefs.EnginePriority; // Set the CPU Priority back to Above Normal (engine always runs above normal)

                List<string> parts = _AudioParts.Concat(_SubtitleParts).ToList(); // Create a list of all audio and subtitle parts
                if (!String.IsNullOrWhiteSpace(_VideoPart))
                    parts.Add(_VideoPart); // add video part

                _jobLog.WriteEntry(this, "Source " + _SourceFile + " filesize [KB] : " + (sourceSize / 1024).ToString("N", CultureInfo.InvariantCulture), Log.LogEntryType.Debug); // Write to file
                foreach (string part in parts)
                {
                    _jobLog.WriteEntry(this, part + " extracted filesize [KB] : " + (FileIO.FileSize(part) / 1024).ToString("N", CultureInfo.InvariantCulture), Log.LogEntryType.Debug); // Write to file
                }
                _jobLog.WriteEntry(this, "Total extracted parts size [KB] : " + (totalPartsSize / 1024).ToString("N", CultureInfo.InvariantCulture), Log.LogEntryType.Debug); // Write to file

                if (!AbortError)
                    _SuccessfulExtraction = true;
            }

            public void DeleteParts()
            {
                Util.FileIO.TryFileDelete(_VideoPart);
                foreach (string audioPart in _AudioParts)
                {
                    Util.FileIO.TryFileDelete(audioPart);
                }
                foreach (string subtitlePart in _SubtitleParts)
                {
                    Util.FileIO.TryFileDelete(subtitlePart);
                }
            }

            public void Dispose()
            {
                // Be good - release the COM objects
                IEnumFilters filters;
                IntPtr fetched = IntPtr.Zero;
                int hr = _gb.EnumFilters(out filters);
                IBaseFilter[] baseFilters = new IBaseFilter[1];
                while (filters.Next(baseFilters.Length, baseFilters, fetched) == 0)
                {
                    FilterInfo info;
                    baseFilters[0].QueryFilterInfo(out info);
                    if (Marshal.IsComObject(baseFilters[0])) // Only release COM objects
                        Marshal.FinalReleaseComObject(baseFilters[0]);
                }
                Marshal.FinalReleaseComObject(_gb);
            }
        }
    }
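// ---------------------------------------------------------------------------------------------
// Editor's note: the sketch below is NOT part of the original MCEBuddy source. It is a minimal,
// hedged usage example showing the call sequence implied by ExtractWithGraph above
// (construct -> BuildGraph -> RunGraph -> check SuccessfulExtraction -> Dispose). The JobStatus
// and Log types are assumed to come from the MCEBuddy.Globals / MCEBuddy.Util namespaces listed
// in the original file's using directives; the caller supplies already-constructed instances,
// since their constructors are not shown here.
// ---------------------------------------------------------------------------------------------
namespace MCEBuddy.RemuxMediaCenter.Examples
{
    using MCEBuddy.Globals;
    using MCEBuddy.Util;

    internal static class ExtractWithGraphUsageSketch
    {
        /// <summary>
        /// Extracts the video and subtitle streams of a DVR-MS/WTV/TiVO recording into workPath.
        /// Returns true if the graph reported a successful extraction.
        /// </summary>
        internal static bool ExtractVideoAndSubtitles(string sourceFile, string workPath, JobStatus jobStatus, Log jobLog)
        {
            // Audio extraction is commented out in ExtractWithGraph.BuildGraph, so only video and subtitles are requested here
            var mediaType = ExtractWithGraph.ExtractMediaType.Video | ExtractWithGraph.ExtractMediaType.Subtitle;

            using (var extractor = new ExtractWithGraph(sourceFile, workPath, mediaType, jobStatus, jobLog))
            {
                extractor.BuildGraph(); // Wire source -> (decryptor) -> dump filter for each selected stream
                extractor.RunGraph();   // Pump the graph until completion, cancellation or hang detection

                if (!extractor.SuccessfulExtraction)
                {
                    extractor.DeleteParts(); // Clean up partial output files on failure
                    return false;
                }

                // extractor.VideoPart and extractor.SubtitleParts now point at the dumped streams
                return true;
            }
        }
    }
}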
namespace Microsoft.Azure.Management.StorSimple8000Series { using Azure; using Management; using Rest; using Rest.Azure; using Rest.Azure.OData; using Models; using System.Collections; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; /// <summary> /// BackupsOperations operations. /// </summary> public partial interface IBackupsOperations { /// <summary> /// Retrieves all the backups in a device. /// </summary> /// <param name='deviceName'> /// The device name /// </param> /// <param name='resourceGroupName'> /// The resource group name /// </param> /// <param name='managerName'> /// The manager name /// </param> /// <param name='odataQuery'> /// OData parameters to apply to the operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<IPage<Backup>>> ListByDeviceWithHttpMessagesAsync(string deviceName, string resourceGroupName, string managerName, ODataQuery<BackupFilter> odataQuery = default(ODataQuery<BackupFilter>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Deletes the backup. /// </summary> /// <param name='deviceName'> /// The device name /// </param> /// <param name='backupName'> /// The backup name. /// </param> /// <param name='resourceGroupName'> /// The resource group name /// </param> /// <param name='managerName'> /// The manager name /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string deviceName, string backupName, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Clones the backup element as a new volume. /// </summary> /// <param name='deviceName'> /// The device name /// </param> /// <param name='backupName'> /// The backup name. /// </param> /// <param name='backupElementName'> /// The backup element name. /// </param> /// <param name='parameters'> /// The clone request object. /// </param> /// <param name='resourceGroupName'> /// The resource group name /// </param> /// <param name='managerName'> /// The manager name /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse> CloneWithHttpMessagesAsync(string deviceName, string backupName, string backupElementName, CloneRequest parameters, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Restores the backup on the device. /// </summary> /// <param name='deviceName'> /// The device name /// </param> /// <param name='backupName'> /// The backupSet name /// </param> /// <param name='resourceGroupName'> /// The resource group name /// </param> /// <param name='managerName'> /// The manager name /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse> RestoreWithHttpMessagesAsync(string deviceName, string backupName, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Deletes the backup. /// </summary> /// <param name='deviceName'> /// The device name /// </param> /// <param name='backupName'> /// The backup name. /// </param> /// <param name='resourceGroupName'> /// The resource group name /// </param> /// <param name='managerName'> /// The manager name /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string deviceName, string backupName, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Clones the backup element as a new volume. /// </summary> /// <param name='deviceName'> /// The device name /// </param> /// <param name='backupName'> /// The backup name. /// </param> /// <param name='backupElementName'> /// The backup element name. /// </param> /// <param name='parameters'> /// The clone request object. /// </param> /// <param name='resourceGroupName'> /// The resource group name /// </param> /// <param name='managerName'> /// The manager name /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse> BeginCloneWithHttpMessagesAsync(string deviceName, string backupName, string backupElementName, CloneRequest parameters, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Restores the backup on the device. /// </summary> /// <param name='deviceName'> /// The device name /// </param> /// <param name='backupName'> /// The backupSet name /// </param> /// <param name='resourceGroupName'> /// The resource group name /// </param> /// <param name='managerName'> /// The manager name /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse> BeginRestoreWithHttpMessagesAsync(string deviceName, string backupName, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Retrieves all the backups in a device. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<IPage<Backup>>> ListByDeviceNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); } }
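// ---------------------------------------------------------------------------------------------
// Editor's note: the sketch below is NOT part of the generated StorSimple8000Series client. It
// illustrates the usual paging pattern for the ListByDeviceWithHttpMessagesAsync /
// ListByDeviceNextWithHttpMessagesAsync pair declared above, assuming the standard
// Microsoft.Rest.Azure contract (AzureOperationResponse<T>.Body holds the IPage<T>, and
// IPage<T>.NextPageLink is null or empty on the last page). The concrete IBackupsOperations
// implementation is obtained from the management client elsewhere.
// ---------------------------------------------------------------------------------------------
namespace Microsoft.Azure.Management.StorSimple8000Series.Examples
{
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Management.StorSimple8000Series.Models;

    internal static class BackupsPagingSketch
    {
        /// <summary>
        /// Lists every backup on a device by following NextPageLink until the service stops returning one.
        /// </summary>
        internal static async Task<IList<Backup>> ListAllBackupsAsync(
            IBackupsOperations operations,
            string deviceName,
            string resourceGroupName,
            string managerName,
            CancellationToken cancellationToken = default(CancellationToken))
        {
            var backups = new List<Backup>();

            // First page
            var response = await operations.ListByDeviceWithHttpMessagesAsync(
                deviceName, resourceGroupName, managerName,
                cancellationToken: cancellationToken).ConfigureAwait(false);
            var page = response.Body;
            backups.AddRange(page);

            // Subsequent pages, if any
            while (!string.IsNullOrEmpty(page.NextPageLink))
            {
                var nextResponse = await operations.ListByDeviceNextWithHttpMessagesAsync(
                    page.NextPageLink, cancellationToken: cancellationToken).ConfigureAwait(false);
                page = nextResponse.Body;
                backups.AddRange(page);
            }

            return backups;
        }
    }
}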
using UnityEditor.Experimental.Rendering.TestFramework; using NUnit.Framework; using System; using UnityEngine.Rendering; using System.Linq; using System.Collections.Generic; using System.Text; namespace UnityEngine.Experimental.Rendering.HDPipeline.Tests { public class FrameSettingsTests { Object m_ToClean; [TearDown] public void TearDown() { if (m_ToClean != null) CoreUtils.Destroy(m_ToClean); FrameSettingsHistory.frameSettingsHistory.Clear(); } [Test] public void NoDoubleBitIndex() { var values = Enum.GetValues(typeof(FrameSettingsField)); var singleValues = (values as IEnumerable<int>).Distinct(); //gathering helpful debug info var messageDuplicates = new StringBuilder(); if (values.Length != singleValues.Count()) { var names = Enum.GetNames(typeof(FrameSettingsField)); for (int i = 0; i < values.Length - 1; ++i) { var a = values.GetValue(i); var b = values.GetValue(i + 1); if ((int)values.GetValue(i) == (int)values.GetValue(i + 1)) { messageDuplicates.AppendFormat("{{ {0}: {1}, {2}", (int)values.GetValue(i), names[i], names[i + 1]); ++i; while (values.GetValue(i) == values.GetValue(i + 1)) { if (values.GetValue(i) == values.GetValue(i + 1)) { messageDuplicates.AppendFormat(", {0}", names[i + 1]); ++i; } } messageDuplicates.Append(" }, "); } } } Assert.AreEqual(values.Length, singleValues.Count(), String.Format("Double bit index found: {0}\nNumber of bit index against number of distinct bit index:", messageDuplicates.ToString())); } // deactivate this test for template package making issue //[Test] public void FrameSettingsAggregation() { for (int i = 0; i < 10; ++i) { //init FrameSettings fs = default; FrameSettingsOverrideMask fso = default; FrameSettingsRenderType defaultFSType = RandomUtilities.RandomEnumValue<FrameSettingsRenderType>(i); FrameSettings defaultFS; FrameSettings result = FrameSettings.defaultCamera; FrameSettings tester = default; RenderPipelineSettings supportedFeatures = new RenderPipelineSettings(); switch (defaultFSType) { case FrameSettingsRenderType.Camera: defaultFS = FrameSettings.defaultCamera; break; case FrameSettingsRenderType.CustomOrBakedReflection: defaultFS = FrameSettings.defaultCustomOrBakeReflectionProbe; break; case FrameSettingsRenderType.RealtimeReflection: defaultFS = FrameSettings.defaultRealtimeReflectionProbe; break; default: throw new ArgumentException("Unknown FrameSettingsRenderType"); } //change randomly override values for (int j = 0; j < 10; ++j) { FrameSettingsField field = RandomUtilities.RandomEnumValue<FrameSettingsField>((i + 0.5f) * (j + 0.3f)); fs.SetEnabled(field, RandomUtilities.RandomBool((i + 1) * j)); fso.mask[(uint)field] = true; } //create and init gameobjects var go = new GameObject("TestObject"); m_ToClean = go; var cam = go.AddComponent<Camera>(); var add = cam.GetComponent<HDAdditionalCameraData>() ?? cam.gameObject.AddComponent<HDAdditionalCameraData>(); Assert.True(add != null && !add.Equals(null)); add.renderingPathCustomFrameSettings = fs; add.renderingPathCustomFrameSettingsOverrideMask = fso; add.defaultFrameSettings = defaultFSType; add.customRenderingSettings = true; //gather data two different ways FrameSettings.AggregateFrameSettings(ref result, cam, add, ref defaultFS, supportedFeatures); foreach (FrameSettingsField field in Enum.GetValues(typeof(FrameSettingsField))) { tester.SetEnabled(field, fso.mask[(uint)field] ? 
fs.IsEnabled(field) : defaultFS.IsEnabled(field)); } FrameSettings.Sanitize(ref tester, cam, supportedFeatures); //test Assert.AreEqual(result, tester); Object.DestroyImmediate(go); } } // deactivate this test for template package making issue //[Test] public void FrameSettingsHistoryAggregation() { for (int i = 0; i < 10; ++i) { //init FrameSettings fs = default; FrameSettingsOverrideMask fso = default; FrameSettingsRenderType defaultFSType = RandomUtilities.RandomEnumValue<FrameSettingsRenderType>(i); FrameSettings defaultFS; FrameSettings result = FrameSettings.defaultCamera; FrameSettings tester = default; RenderPipelineSettings supportedFeatures = new RenderPipelineSettings(); switch (defaultFSType) { case FrameSettingsRenderType.Camera: defaultFS = FrameSettings.defaultCamera; break; case FrameSettingsRenderType.CustomOrBakedReflection: defaultFS = FrameSettings.defaultCustomOrBakeReflectionProbe; break; case FrameSettingsRenderType.RealtimeReflection: defaultFS = FrameSettings.defaultRealtimeReflectionProbe; break; default: throw new ArgumentException("Unknown FrameSettingsRenderType"); } //change randomly override values for (int j = 0; j < 10; ++j) { FrameSettingsField field = RandomUtilities.RandomEnumValue<FrameSettingsField>((i + 0.5f) * (j + 0.3f)); fs.SetEnabled(field, RandomUtilities.RandomBool((i + 1) * j)); fso.mask[(uint)field] = true; } //create and init gameobjects var go = new GameObject("TestObject"); m_ToClean = go; var cam = go.AddComponent<Camera>(); var add = cam.GetComponent<HDAdditionalCameraData>() ?? cam.gameObject.AddComponent<HDAdditionalCameraData>(); Assert.True(add != null && !add.Equals(null)); add.renderingPathCustomFrameSettings = fs; add.renderingPathCustomFrameSettingsOverrideMask = fso; add.defaultFrameSettings = defaultFSType; add.customRenderingSettings = true; //gather data two different ways FrameSettingsHistory.AggregateFrameSettings(ref result, cam, add, ref defaultFS, supportedFeatures); foreach (FrameSettingsField field in Enum.GetValues(typeof(FrameSettingsField))) { tester.SetEnabled(field, fso.mask[(uint)field] ? 
fs.IsEnabled(field) : defaultFS.IsEnabled(field)); } FrameSettings.Sanitize(ref tester, cam, supportedFeatures); //simulate debugmenu changes for (int j = 0; j < 10; ++j) { FrameSettingsField field = RandomUtilities.RandomEnumValue<FrameSettingsField>((i + 0.5f) * (j + 0.3f)); var fsh = FrameSettingsHistory.frameSettingsHistory[cam]; bool debugValue = RandomUtilities.RandomBool((i + 1) * j); fsh.debug.SetEnabled(field, debugValue); FrameSettingsHistory.frameSettingsHistory[cam] = fsh; tester.SetEnabled(field, debugValue); } //test result = FrameSettingsHistory.frameSettingsHistory[cam].debug; Assert.AreEqual(result, tester); Object.DestroyImmediate(go); } } public enum LegacyLitShaderMode { Forward, Deferred } public enum LegacyLightLoopSettingsOverrides { FptlForForwardOpaque = 1 << 0, BigTilePrepass = 1 << 1, ComputeLightEvaluation = 1 << 2, ComputeLightVariants = 1 << 3, ComputeMaterialVariants = 1 << 4, TileAndCluster = 1 << 5, } public enum LegacyFrameSettingsOverrides { //lighting settings Shadow = 1 << 0, ContactShadow = 1 << 1, ShadowMask = 1 << 2, SSR = 1 << 3, SSAO = 1 << 4, SubsurfaceScattering = 1 << 5, Transmission = 1 << 6, AtmosphericScaterring = 1 << 7, Volumetrics = 1 << 8, ReprojectionForVolumetrics = 1 << 9, LightLayers = 1 << 10, MSAA = 1 << 11, ExposureControl = 1 << 12, //rendering pass TransparentPrepass = 1 << 13, TransparentPostpass = 1 << 14, MotionVectors = 1 << 15, ObjectMotionVectors = 1 << 16, Decals = 1 << 17, RoughRefraction = 1 << 18, Distortion = 1 << 19, Postprocess = 1 << 20, //rendering settings ShaderLitMode = 1 << 21, DepthPrepassWithDeferredRendering = 1 << 22, OpaqueObjects = 1 << 24, TransparentObjects = 1 << 25, RealtimePlanarReflection = 1 << 26, // Async settings AsyncCompute = 1 << 23, LightListAsync = 1 << 27, SSRAsync = 1 << 28, SSAOAsync = 1 << 29, ContactShadowsAsync = 1 << 30, VolumeVoxelizationsAsync = 1 << 31, } public class LegacyLightLoopSettings { public LegacyLightLoopSettingsOverrides overrides; public bool enableDeferredTileAndCluster; public bool enableComputeLightEvaluation; public bool enableComputeLightVariants; public bool enableComputeMaterialVariants; public bool enableFptlForForwardOpaque; public bool enableBigTilePrepass; public bool isFptlEnabled; } public class LegacyFrameSettings { public LegacyFrameSettingsOverrides overrides; public bool enableShadow; public bool enableContactShadows; public bool enableShadowMask; public bool enableSSR; public bool enableSSAO; public bool enableSubsurfaceScattering; public bool enableTransmission; public bool enableAtmosphericScattering; public bool enableVolumetrics; public bool enableReprojectionForVolumetrics; public bool enableLightLayers; public bool enableExposureControl; public float diffuseGlobalDimmer; public float specularGlobalDimmer; public LegacyLitShaderMode shaderLitMode; public bool enableDepthPrepassWithDeferredRendering; public bool enableTransparentPrepass; public bool enableMotionVectors; // Enable/disable whole motion vectors pass (Camera + Object). public bool enableObjectMotionVectors; public bool enableDecals; public bool enableRoughRefraction; // Depends on DepthPyramid - If not enable, just do a copy of the scene color (?) - how to disable rough refraction ? 
public bool enableTransparentPostpass; public bool enableDistortion; public bool enablePostprocess; public bool enableOpaqueObjects; public bool enableTransparentObjects; public bool enableRealtimePlanarReflection; public bool enableMSAA; public bool enableAsyncCompute; public bool runLightListAsync; public bool runSSRAsync; public bool runSSAOAsync; public bool runContactShadowsAsync; public bool runVolumeVoxelizationAsync; public LegacyLightLoopSettings lightLoopSettings; } static object[] s_LegacyFrameSettingsDatas = { new LegacyFrameSettings { overrides = LegacyFrameSettingsOverrides.SSR | LegacyFrameSettingsOverrides.MSAA | LegacyFrameSettingsOverrides.ShaderLitMode, enableSSR = true, enableMSAA = true, shaderLitMode = LegacyLitShaderMode.Deferred, lightLoopSettings = new LegacyLightLoopSettings() }, new LegacyFrameSettings { overrides = LegacyFrameSettingsOverrides.ObjectMotionVectors | LegacyFrameSettingsOverrides.OpaqueObjects | LegacyFrameSettingsOverrides.ShaderLitMode, enableOpaqueObjects = false, enableMSAA = true, enableMotionVectors = true, shaderLitMode = LegacyLitShaderMode.Forward, lightLoopSettings = new LegacyLightLoopSettings() }, new LegacyFrameSettings { overrides = LegacyFrameSettingsOverrides.Postprocess | LegacyFrameSettingsOverrides.Shadow | LegacyFrameSettingsOverrides.ShaderLitMode, diffuseGlobalDimmer = 42f, enableMSAA = true, enablePostprocess = false, lightLoopSettings = new LegacyLightLoopSettings { overrides = LegacyLightLoopSettingsOverrides.ComputeLightVariants | LegacyLightLoopSettingsOverrides.ComputeLightEvaluation, enableComputeLightVariants = true, enableComputeMaterialVariants = false } } }; [Test, TestCaseSource(nameof(s_LegacyFrameSettingsDatas))] public void MigrationTest(LegacyFrameSettings legacyFrameSettingsData) { using (new PrefabMigrationTests( GetType().Name, GeneratePrefabYAML(legacyFrameSettingsData), out GameObject prefab )) { var instance = Object.Instantiate(prefab); m_ToClean = instance; var probe = instance.GetComponent<HDAdditionalReflectionData>(); prefab.SetActive(true); probe.enabled = true; var frameSettingsData = probe.frameSettings; var frameSettingsMask = probe.frameSettingsOverrideMask; LitShaderMode litShaderModeEquivalent; switch (legacyFrameSettingsData.shaderLitMode) { case LegacyLitShaderMode.Deferred: litShaderModeEquivalent = LitShaderMode.Deferred; break; case LegacyLitShaderMode.Forward: litShaderModeEquivalent = LitShaderMode.Forward; break; default: throw new ArgumentException("Unknown LitShaderMode"); } Assert.AreEqual(litShaderModeEquivalent, frameSettingsData.litShaderMode); Assert.AreEqual(legacyFrameSettingsData.enableShadow, frameSettingsData.IsEnabled(FrameSettingsField.Shadow)); Assert.AreEqual(legacyFrameSettingsData.enableContactShadows, frameSettingsData.IsEnabled(FrameSettingsField.ContactShadows)); Assert.AreEqual(legacyFrameSettingsData.enableShadowMask, frameSettingsData.IsEnabled(FrameSettingsField.ShadowMask)); Assert.AreEqual(legacyFrameSettingsData.enableSSR, frameSettingsData.IsEnabled(FrameSettingsField.SSR)); Assert.AreEqual(legacyFrameSettingsData.enableSSAO, frameSettingsData.IsEnabled(FrameSettingsField.SSAO)); Assert.AreEqual(legacyFrameSettingsData.enableSubsurfaceScattering, frameSettingsData.IsEnabled(FrameSettingsField.SubsurfaceScattering)); Assert.AreEqual(legacyFrameSettingsData.enableTransmission, frameSettingsData.IsEnabled(FrameSettingsField.Transmission)); Assert.AreEqual(legacyFrameSettingsData.enableAtmosphericScattering, 
frameSettingsData.IsEnabled(FrameSettingsField.AtmosphericScattering)); Assert.AreEqual(legacyFrameSettingsData.enableVolumetrics, frameSettingsData.IsEnabled(FrameSettingsField.Volumetrics)); Assert.AreEqual(legacyFrameSettingsData.enableReprojectionForVolumetrics, frameSettingsData.IsEnabled(FrameSettingsField.ReprojectionForVolumetrics)); Assert.AreEqual(legacyFrameSettingsData.enableLightLayers, frameSettingsData.IsEnabled(FrameSettingsField.LightLayers)); Assert.AreEqual(legacyFrameSettingsData.enableExposureControl, frameSettingsData.IsEnabled(FrameSettingsField.ExposureControl)); Assert.AreEqual(legacyFrameSettingsData.enableDepthPrepassWithDeferredRendering, frameSettingsData.IsEnabled(FrameSettingsField.DepthPrepassWithDeferredRendering)); Assert.AreEqual(legacyFrameSettingsData.enableTransparentPrepass, frameSettingsData.IsEnabled(FrameSettingsField.TransparentPrepass)); Assert.AreEqual(legacyFrameSettingsData.enableMotionVectors, frameSettingsData.IsEnabled(FrameSettingsField.MotionVectors)); Assert.AreEqual(legacyFrameSettingsData.enableObjectMotionVectors, frameSettingsData.IsEnabled(FrameSettingsField.ObjectMotionVectors)); Assert.AreEqual(legacyFrameSettingsData.enableDecals, frameSettingsData.IsEnabled(FrameSettingsField.Decals)); Assert.AreEqual(legacyFrameSettingsData.enableRoughRefraction, frameSettingsData.IsEnabled(FrameSettingsField.RoughRefraction)); Assert.AreEqual(legacyFrameSettingsData.enableTransparentPostpass, frameSettingsData.IsEnabled(FrameSettingsField.TransparentPostpass)); Assert.AreEqual(legacyFrameSettingsData.enableDistortion, frameSettingsData.IsEnabled(FrameSettingsField.Distortion)); Assert.AreEqual(legacyFrameSettingsData.enablePostprocess, frameSettingsData.IsEnabled(FrameSettingsField.Postprocess)); Assert.AreEqual(legacyFrameSettingsData.enableOpaqueObjects, frameSettingsData.IsEnabled(FrameSettingsField.OpaqueObjects)); Assert.AreEqual(legacyFrameSettingsData.enableTransparentObjects, frameSettingsData.IsEnabled(FrameSettingsField.TransparentObjects)); Assert.AreEqual(legacyFrameSettingsData.enableRealtimePlanarReflection, frameSettingsData.IsEnabled(FrameSettingsField.RealtimePlanarReflection)); Assert.AreEqual(legacyFrameSettingsData.enableMSAA, frameSettingsData.IsEnabled(FrameSettingsField.MSAA)); Assert.AreEqual(legacyFrameSettingsData.enableAsyncCompute, frameSettingsData.IsEnabled(FrameSettingsField.AsyncCompute)); Assert.AreEqual(legacyFrameSettingsData.runLightListAsync, frameSettingsData.IsEnabled(FrameSettingsField.LightListAsync)); Assert.AreEqual(legacyFrameSettingsData.runSSRAsync, frameSettingsData.IsEnabled(FrameSettingsField.SSRAsync)); Assert.AreEqual(legacyFrameSettingsData.runSSAOAsync, frameSettingsData.IsEnabled(FrameSettingsField.SSAOAsync)); Assert.AreEqual(legacyFrameSettingsData.runContactShadowsAsync, frameSettingsData.IsEnabled(FrameSettingsField.ContactShadowsAsync)); Assert.AreEqual(legacyFrameSettingsData.runVolumeVoxelizationAsync, frameSettingsData.IsEnabled(FrameSettingsField.VolumeVoxelizationsAsync)); Assert.AreEqual(legacyFrameSettingsData.lightLoopSettings.enableBigTilePrepass, frameSettingsData.IsEnabled(FrameSettingsField.BigTilePrepass)); Assert.AreEqual(legacyFrameSettingsData.lightLoopSettings.enableComputeLightEvaluation, frameSettingsData.IsEnabled(FrameSettingsField.ComputeLightEvaluation)); Assert.AreEqual(legacyFrameSettingsData.lightLoopSettings.enableComputeLightVariants, frameSettingsData.IsEnabled(FrameSettingsField.ComputeLightVariants)); 
Assert.AreEqual(legacyFrameSettingsData.lightLoopSettings.enableComputeMaterialVariants, frameSettingsData.IsEnabled(FrameSettingsField.ComputeMaterialVariants)); Assert.AreEqual(legacyFrameSettingsData.lightLoopSettings.enableDeferredTileAndCluster, frameSettingsData.IsEnabled(FrameSettingsField.DeferredTile)); Assert.AreEqual(legacyFrameSettingsData.lightLoopSettings.enableFptlForForwardOpaque, frameSettingsData.IsEnabled(FrameSettingsField.FPTLForForwardOpaque)); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.Shadow) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.Shadow]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.ContactShadow) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ContactShadows]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.ShadowMask) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ShadowMask]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.SSR) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.SSR]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.SSAO) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.SSAO]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.SubsurfaceScattering) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.SubsurfaceScattering]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.Transmission) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.Transmission]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.AtmosphericScaterring) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.AtmosphericScattering]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.Volumetrics) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.Volumetrics]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.ReprojectionForVolumetrics) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ReprojectionForVolumetrics]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.LightLayers) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.LightLayers]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.ExposureControl) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ExposureControl]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.DepthPrepassWithDeferredRendering) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.DepthPrepassWithDeferredRendering]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.TransparentPrepass) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.TransparentPrepass]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.MotionVectors) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.MotionVectors]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.ObjectMotionVectors) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ObjectMotionVectors]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.Decals) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.Decals]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.RoughRefraction) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.RoughRefraction]); 
Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.TransparentPostpass) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.TransparentPostpass]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.Distortion) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.Distortion]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.Postprocess) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.Postprocess]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.OpaqueObjects) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.OpaqueObjects]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.TransparentObjects) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.TransparentObjects]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.RealtimePlanarReflection) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.RealtimePlanarReflection]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.MSAA) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.MSAA]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.AsyncCompute) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.AsyncCompute]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.LightListAsync) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.LightListAsync]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.SSRAsync) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.SSRAsync]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.SSAOAsync) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.SSAOAsync]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.ContactShadowsAsync) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ContactShadowsAsync]); Assert.AreEqual((legacyFrameSettingsData.overrides & LegacyFrameSettingsOverrides.VolumeVoxelizationsAsync) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.VolumeVoxelizationsAsync]); Assert.AreEqual((legacyFrameSettingsData.lightLoopSettings.overrides & LegacyLightLoopSettingsOverrides.BigTilePrepass) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.BigTilePrepass]); Assert.AreEqual((legacyFrameSettingsData.lightLoopSettings.overrides & LegacyLightLoopSettingsOverrides.ComputeLightEvaluation) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ComputeLightEvaluation]); Assert.AreEqual((legacyFrameSettingsData.lightLoopSettings.overrides & LegacyLightLoopSettingsOverrides.ComputeLightVariants) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ComputeLightVariants]); Assert.AreEqual((legacyFrameSettingsData.lightLoopSettings.overrides & LegacyLightLoopSettingsOverrides.ComputeMaterialVariants) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.ComputeMaterialVariants]); Assert.AreEqual((legacyFrameSettingsData.lightLoopSettings.overrides & LegacyLightLoopSettingsOverrides.TileAndCluster) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.DeferredTile]); Assert.AreEqual((legacyFrameSettingsData.lightLoopSettings.overrides & LegacyLightLoopSettingsOverrides.FptlForForwardOpaque) > 0, frameSettingsMask.mask[(uint)FrameSettingsField.FPTLForForwardOpaque]); } } string GeneratePrefabYAML(LegacyFrameSettings legacyFrameSettings) => $@"%YAML 1.1 %TAG !u! 
tag:unity3d.com,2011: --- !u!1 &3102262843427888416 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {{fileID: 0}} m_PrefabInstance: {{fileID: 0}} m_PrefabAsset: {{fileID: 0}} serializedVersion: 6 m_Component: - component: {{fileID: 3102262843427888420}} - component: {{fileID: 3102262843427888421}} - component: {{fileID: 3102262843427888418}} m_Layer: 5 m_Name: Reflection Probe m_TagString: Untagged m_Icon: {{fileID: 0}} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!4 &3102262843427888420 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {{fileID: 0}} m_PrefabInstance: {{fileID: 0}} m_PrefabAsset: {{fileID: 0}} m_GameObject: {{fileID: 3102262843427888416}} m_LocalRotation: {{x: 0, y: 0, z: 0.26681787, w: 0.963747}} m_LocalPosition: {{x: 3.9601986, y: 0.8451278, z: -1.4354408}} m_LocalScale: {{x: 1, y: 1, z: 1}} m_Children: [] m_Father: {{fileID: 0}} m_RootOrder: 0 m_LocalEulerAnglesHint: {{x: 0, y: 0, z: 30.95}} --- !u!215 &3102262843427888421 ReflectionProbe: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {{fileID: 0}} m_PrefabInstance: {{fileID: 0}} m_PrefabAsset: {{fileID: 0}} m_GameObject: {{fileID: 3102262843427888416}} m_Enabled: 1 serializedVersion: 2 m_Type: 0 m_Mode: 1 m_RefreshMode: 0 m_TimeSlicingMode: 0 m_Resolution: 128 m_UpdateFrequency: 0 m_BoxSize: {{x: 6, y: 6, z: 6}} m_BoxOffset: {{x: 0.32623026, y: 1.5948586, z: 1.3}} m_NearClip: 2.76 m_FarClip: 5 m_ShadowDistance: 100 m_ClearFlags: 2 m_BackGroundColor: {{r: 0.1882353, g: 0.023529412, b: 0.13529739, a: 0}} m_CullingMask: serializedVersion: 2 m_Bits: 310 m_IntensityMultiplier: 1 m_BlendDistance: 0 m_HDR: 1 m_BoxProjection: 0 m_RenderDynamicObjects: 0 m_UseOcclusionCulling: 1 m_Importance: 1 m_CustomBakedTexture: {{fileID: 8900000, guid: b7a0288be1440c140849eb49d3b12573, type: 3}} --- !u!114 &3102262843427888418 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {{fileID: 0}} m_PrefabInstance: {{fileID: 0}} m_PrefabAsset: {{fileID: 0}} m_GameObject: {{fileID: 3102262843427888416}} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {{fileID: 11500000, guid: d0ef8dc2c2eabfa4e8cb77be57a837c0, type: 3}} m_Name: m_EditorClassIdentifier: m_ProxyVolume: {{fileID: 0}} m_InfiniteProjection: 1 m_InfluenceVolume: m_Shape: 1 m_Offset: {{x: 1.1, y: 1.2, z: 1.3}} m_BoxSize: {{x: 7, y: 8, z: 9}} m_BoxBlendDistancePositive: {{x: 1, y: 2, z: 3}} m_BoxBlendDistanceNegative: {{x: 1.5, y: 2.5, z: 3.5}} m_BoxBlendNormalDistancePositive: {{x: 0.5, y: 0.4, z: 0.3}} m_BoxBlendNormalDistanceNegative: {{x: 0.2, y: 0.1, z: 0.6}} m_BoxSideFadePositive: {{x: 0.1, y: 0.2, z: 0.3}} m_BoxSideFadeNegative: {{x: 0.15, y: 0.25, z: 0.35}} m_EditorAdvancedModeBlendDistancePositive: {{x: 1, y: 2, z: 3}} m_EditorAdvancedModeBlendDistanceNegative: {{x: 1.5, y: 2.5, z: 3.5}} m_EditorSimplifiedModeBlendDistance: 3.5 m_EditorAdvancedModeBlendNormalDistancePositive: {{x: 0.5, y: 0.4, z: 0.3}} m_EditorAdvancedModeBlendNormalDistanceNegative: {{x: 0.2, y: 0.1, z: 0.6}} m_EditorSimplifiedModeBlendNormalDistance: 4.5 m_EditorAdvancedModeEnabled: 1 m_EditorAdvancedModeFaceFadePositive: {{x: 0.1, y: 0.2, z: 0.3}} m_EditorAdvancedModeFaceFadeNegative: {{x: 0.15, y: 0.25, z: 0.35}} m_SphereRadius: 6 m_SphereBlendDistance: 2 m_SphereBlendNormalDistance: 1 m_Version: 1 m_ObsoleteSphereBaseOffset: {{x: 0, y: 0, z: 0}} m_FrameSettings: overrides: {legacyFrameSettings.overrides} enableShadow: {(legacyFrameSettings.enableShadow ? 1 : 0)} enableContactShadows: {(legacyFrameSettings.enableContactShadows ? 
1 : 0)} enableShadowMask: {(legacyFrameSettings.enableShadowMask ? 1 : 0)} enableSSR: {(legacyFrameSettings.enableSSR ? 1 : 0)} enableSSAO: {(legacyFrameSettings.enableSSAO ? 1 : 0)} enableSubsurfaceScattering: {(legacyFrameSettings.enableSubsurfaceScattering ? 1 : 0)} enableTransmission: {(legacyFrameSettings.enableTransmission ? 1 : 0)} enableAtmosphericScattering: {(legacyFrameSettings.enableAtmosphericScattering ? 1 : 0)} enableVolumetrics: {(legacyFrameSettings.enableVolumetrics ? 1 : 0)} enableReprojectionForVolumetrics: {(legacyFrameSettings.enableReprojectionForVolumetrics ? 1 : 0)} enableLightLayers: {(legacyFrameSettings.enableLightLayers ? 1 : 0)} enableExposureControl: {(legacyFrameSettings.enableExposureControl ? 1 : 0)} diffuseGlobalDimmer: {legacyFrameSettings.diffuseGlobalDimmer} specularGlobalDimmer: {legacyFrameSettings.specularGlobalDimmer} shaderLitMode: {(legacyFrameSettings.shaderLitMode == LegacyLitShaderMode.Deferred ? 1 : 0)} enableDepthPrepassWithDeferredRendering: {(legacyFrameSettings.enableDepthPrepassWithDeferredRendering ? 1 : 0)} enableTransparentPrepass: {(legacyFrameSettings.enableTransparentPrepass ? 1 : 0)} enableMotionVectors: {(legacyFrameSettings.enableMotionVectors ? 1 : 0)} enableObjectMotionVectors: {(legacyFrameSettings.enableObjectMotionVectors ? 1 : 0)} enableDecals: {(legacyFrameSettings.enableDecals ? 1 : 0)} enableRoughRefraction: {(legacyFrameSettings.enableRoughRefraction ? 1 : 0)} enableTransparentPostpass: {(legacyFrameSettings.enableTransparentPostpass ? 1 : 0)} enableDistortion: {(legacyFrameSettings.enableDistortion ? 1 : 0)} enablePostprocess: {(legacyFrameSettings.enablePostprocess ? 1 : 0)} enableAsyncCompute: {(legacyFrameSettings.enableAsyncCompute ? 1 : 0)} runLightListAsync: {(legacyFrameSettings.runLightListAsync ? 1 : 0)} runSSRAsync: {(legacyFrameSettings.runSSRAsync ? 1 : 0)} runSSAOAsync: {(legacyFrameSettings.runSSAOAsync ? 1 : 0)} runContactShadowsAsync: {(legacyFrameSettings.runContactShadowsAsync ? 1 : 0)} runVolumeVoxelizationAsync: {(legacyFrameSettings.runVolumeVoxelizationAsync ? 1 : 0)} enableOpaqueObjects: {(legacyFrameSettings.enableOpaqueObjects ? 1 : 0)} enableTransparentObjects: {(legacyFrameSettings.enableTransparentObjects ? 1 : 0)} enableRealtimePlanarReflection: {(legacyFrameSettings.enableRealtimePlanarReflection ? 1 : 0)} enableMSAA: {(legacyFrameSettings.enableMSAA ? 1 : 0)} lightLoopSettings: overrides: {legacyFrameSettings.lightLoopSettings.overrides} enableTileAndCluster: {(legacyFrameSettings.lightLoopSettings.enableDeferredTileAndCluster ? 1 : 0)} enableComputeLightEvaluation: {(legacyFrameSettings.lightLoopSettings.enableComputeLightEvaluation ? 1 : 0)} enableComputeLightVariants: {(legacyFrameSettings.lightLoopSettings.enableComputeLightVariants ? 1 : 0)} enableComputeMaterialVariants: {(legacyFrameSettings.lightLoopSettings.enableComputeMaterialVariants ? 1 : 0)} enableFptlForForwardOpaque: {(legacyFrameSettings.lightLoopSettings.enableFptlForForwardOpaque ? 1 : 0)} enableBigTilePrepass: {(legacyFrameSettings.lightLoopSettings.enableBigTilePrepass ? 1 : 0)} isFptlEnabled: {(legacyFrameSettings.lightLoopSettings.isFptlEnabled ? 
1 : 0)} m_CaptureSettings: overrides: 0 clearColorMode: 2 backgroundColorHDR: {{r: 0.1882353, g: 0.023529412, b: 0.13529739, a: 0}} clearDepth: 0 cullingMask: serializedVersion: 2 m_Bits: 310 useOcclusionCulling: 0 volumeLayerMask: serializedVersion: 2 m_Bits: 33 volumeAnchorOverride: {{fileID: 0}} projection: 0 nearClipPlane: 2.76 farClipPlane: 5 fieldOfView: 90 orthographicSize: 5 renderingPath: 1 shadowDistance: 666 m_Multiplier: 20 m_Weight: 0.66 m_Mode: 1 m_RefreshMode: 1 m_CustomTexture: {{fileID: 0}} m_BakedTexture: {{fileID: 0}} m_RenderDynamicObjects: 0 lightLayers: 9 m_ReflectionProbeVersion: 6 m_ObsoleteInfluenceShape: 0 m_ObsoleteInfluenceSphereRadius: 3 m_ObsoleteBlendDistancePositive: {{x: 0, y: 0, z: 0}} m_ObsoleteBlendDistanceNegative: {{x: 0, y: 0, z: 0}} m_ObsoleteBlendNormalDistancePositive: {{x: 0, y: 0, z: 0}} m_ObsoleteBlendNormalDistanceNegative: {{x: 0, y: 0, z: 0}} m_ObsoleteBoxSideFadePositive: {{x: 1, y: 1, z: 1}} m_ObsoleteBoxSideFadeNegative: {{x: 1, y: 1, z: 1}} "; } }
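// The assertions in the migration test above all check one pattern: a bit set in the
// legacy "overrides" flags enum must surface as the matching entry in the new
// per-field FrameSettings mask. Below is a minimal, self-contained sketch of that
// mapping pattern; LegacyOverrides, Field, and the bool[] mask are illustrative
// stand-ins for this sketch, not the HDRP types exercised by the test.
using System;

static class OverrideMigrationSketch
{
    [Flags]
    enum LegacyOverrides
    {
        None           = 0,
        Shadow         = 1 << 0,
        ContactShadows = 1 << 1,
        Postprocess    = 1 << 2,
    }

    enum Field
    {
        Shadow = 0,
        ContactShadows = 1,
        Postprocess = 2,
    }

    // Copy each legacy override flag into the new boolean mask, one field at a time,
    // which is exactly what the Assert.AreEqual calls above verify field by field.
    static bool[] MigrateMask(LegacyOverrides overrides)
    {
        var mask = new bool[3];
        mask[(int)Field.Shadow]         = (overrides & LegacyOverrides.Shadow) != 0;
        mask[(int)Field.ContactShadows] = (overrides & LegacyOverrides.ContactShadows) != 0;
        mask[(int)Field.Postprocess]    = (overrides & LegacyOverrides.Postprocess) != 0;
        return mask;
    }

    static void Main()
    {
        var mask = MigrateMask(LegacyOverrides.Shadow | LegacyOverrides.Postprocess);
        Console.WriteLine(string.Join(",", mask)); // True,False,True
    }
}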
using System; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Collections.Specialized; using System.ComponentModel; using System.Runtime.CompilerServices; using Xamarin.Forms.Platform; namespace Xamarin.Forms { [RenderWith(typeof(_PickerRenderer))] public class Picker : View, IElementConfiguration<Picker> { public static readonly BindableProperty TextColorProperty = BindableProperty.Create(nameof(TextColor), typeof(Color), typeof(Picker), Color.Default); public static readonly BindableProperty TitleProperty = BindableProperty.Create(nameof(Title), typeof(string), typeof(Picker), default(string)); public static readonly BindableProperty SelectedIndexProperty = BindableProperty.Create(nameof(SelectedIndex), typeof(int), typeof(Picker), -1, BindingMode.TwoWay, propertyChanged: OnSelectedIndexChanged, coerceValue: CoerceSelectedIndex); public static readonly BindableProperty ItemsSourceProperty = BindableProperty.Create(nameof(ItemsSource), typeof(IList), typeof(Picker), default(IList), propertyChanged: OnItemsSourceChanged); public static readonly BindableProperty SelectedItemProperty = BindableProperty.Create(nameof(SelectedItem), typeof(object), typeof(Picker), null, BindingMode.TwoWay, propertyChanged: OnSelectedItemChanged); readonly Lazy<PlatformConfigurationRegistry<Picker>> _platformConfigurationRegistry; public Picker() { ((INotifyCollectionChanged)Items).CollectionChanged += OnItemsCollectionChanged; _platformConfigurationRegistry = new Lazy<PlatformConfigurationRegistry<Picker>>(() => new PlatformConfigurationRegistry<Picker>(this)); } public IList<string> Items { get; } = new LockableObservableListWrapper(); public IList ItemsSource { get { return (IList)GetValue(ItemsSourceProperty); } set { SetValue(ItemsSourceProperty, value); } } public int SelectedIndex { get { return (int)GetValue(SelectedIndexProperty); } set { SetValue(SelectedIndexProperty, value); } } public object SelectedItem { get { return GetValue(SelectedItemProperty); } set { SetValue(SelectedItemProperty, value); } } public Color TextColor { get { return (Color)GetValue(TextColorProperty); } set { SetValue(TextColorProperty, value); } } public string Title { get { return (string)GetValue(TitleProperty); } set { SetValue(TitleProperty, value); } } BindingBase _itemDisplayBinding; public BindingBase ItemDisplayBinding { get { return _itemDisplayBinding; } set { if (_itemDisplayBinding == value) return; OnPropertyChanging(); var oldValue = _itemDisplayBinding; _itemDisplayBinding = value; OnItemDisplayBindingChanged(oldValue, _itemDisplayBinding); OnPropertyChanged(); } } public event EventHandler SelectedIndexChanged; static readonly BindableProperty s_displayProperty = BindableProperty.Create("Display", typeof(string), typeof(Picker), default(string)); string GetDisplayMember(object item) { if (ItemDisplayBinding == null) return item.ToString(); ItemDisplayBinding.Apply(item, this, s_displayProperty); ItemDisplayBinding.Unapply(); return (string)GetValue(s_displayProperty); } static object CoerceSelectedIndex(BindableObject bindable, object value) { var picker = (Picker)bindable; return picker.Items == null ?
-1 : ((int)value).Clamp(-1, picker.Items.Count - 1); } void OnItemDisplayBindingChanged(BindingBase oldValue, BindingBase newValue) { ResetItems(); } void OnItemsCollectionChanged(object sender, NotifyCollectionChangedEventArgs e) { SelectedIndex = SelectedIndex.Clamp(-1, Items.Count - 1); UpdateSelectedItem(); } static void OnItemsSourceChanged(BindableObject bindable, object oldValue, object newValue) { ((Picker)bindable).OnItemsSourceChanged((IList)oldValue, (IList)newValue); } void OnItemsSourceChanged(IList oldValue, IList newValue) { var oldObservable = oldValue as INotifyCollectionChanged; if (oldObservable != null) oldObservable.CollectionChanged -= CollectionChanged; var newObservable = newValue as INotifyCollectionChanged; if (newObservable != null) { newObservable.CollectionChanged += CollectionChanged; } if (newValue != null) { ((LockableObservableListWrapper)Items).IsLocked = true; ResetItems(); } else { ((LockableObservableListWrapper)Items).InternalClear(); ((LockableObservableListWrapper)Items).IsLocked = false; } } void CollectionChanged(object sender, NotifyCollectionChangedEventArgs e) { switch (e.Action) { case NotifyCollectionChangedAction.Add: AddItems(e); break; case NotifyCollectionChangedAction.Remove: RemoveItems(e); break; default: //Move, Replace, Reset ResetItems(); break; } } void AddItems(NotifyCollectionChangedEventArgs e) { int index = e.NewStartingIndex < 0 ? Items.Count : e.NewStartingIndex; foreach (object newItem in e.NewItems) ((LockableObservableListWrapper)Items).InternalInsert(index++, GetDisplayMember(newItem)); } void RemoveItems(NotifyCollectionChangedEventArgs e) { int index = e.OldStartingIndex < Items.Count ? e.OldStartingIndex : Items.Count; foreach (object _ in e.OldItems) ((LockableObservableListWrapper)Items).InternalRemoveAt(index--); } void ResetItems() { if (ItemsSource == null) return; ((LockableObservableListWrapper)Items).InternalClear(); foreach (object item in ItemsSource) ((LockableObservableListWrapper)Items).InternalAdd(GetDisplayMember(item)); UpdateSelectedItem(); } static void OnSelectedIndexChanged(object bindable, object oldValue, object newValue) { var picker = (Picker)bindable; picker.UpdateSelectedItem(); picker.SelectedIndexChanged?.Invoke(bindable, EventArgs.Empty); } static void OnSelectedItemChanged(BindableObject bindable, object oldValue, object newValue) { var picker = (Picker)bindable; picker.UpdateSelectedIndex(newValue); } void UpdateSelectedIndex(object selectedItem) { if (ItemsSource != null) { SelectedIndex = ItemsSource.IndexOf(selectedItem); return; } SelectedIndex = Items.IndexOf(selectedItem); } void UpdateSelectedItem() { if (SelectedIndex == -1) { SelectedItem = null; return; } if (ItemsSource != null) { SelectedItem = ItemsSource [SelectedIndex]; return; } SelectedItem = Items [SelectedIndex]; } public IPlatformElementConfiguration<T, Picker> On<T>() where T : IConfigPlatform { return _platformConfigurationRegistry.Value.On<T>(); } internal class LockableObservableListWrapper : IList<string>, ICollection<string>, INotifyCollectionChanged, INotifyPropertyChanged, IReadOnlyList<string>, IReadOnlyCollection<string>, IEnumerable<string>, IEnumerable { internal readonly ObservableCollection<string> _list = new ObservableCollection<string>(); event NotifyCollectionChangedEventHandler INotifyCollectionChanged.CollectionChanged { add { ((INotifyCollectionChanged)_list).CollectionChanged += value; } remove { ((INotifyCollectionChanged)_list).CollectionChanged -= value; } } event PropertyChangedEventHandler 
INotifyPropertyChanged.PropertyChanged { add { ((INotifyPropertyChanged)_list).PropertyChanged += value; } remove { ((INotifyPropertyChanged)_list).PropertyChanged -= value; } } public bool IsLocked { get; set; } void ThrowOnLocked() { if (IsLocked) throw new InvalidOperationException("The Items list can not be manipulated if the ItemsSource property is set"); } public string this [int index] { get { return _list [index]; } set { ThrowOnLocked(); _list [index] = value; } } public int Count { get { return _list.Count; } } public bool IsReadOnly { get { return ((IList<string>)_list).IsReadOnly; } } public void InternalAdd(string item) { _list.Add(item); } public void Add(string item) { ThrowOnLocked(); InternalAdd(item); } public void InternalClear() { _list.Clear(); } public void Clear() { ThrowOnLocked(); InternalClear(); } public bool Contains(string item) { return _list.Contains(item); } public void CopyTo(string [] array, int arrayIndex) { _list.CopyTo(array, arrayIndex); } public IEnumerator<string> GetEnumerator() { return _list.GetEnumerator(); } public int IndexOf(string item) { return _list.IndexOf(item); } public void InternalInsert(int index, string item) { _list.Insert(index, item); } public void Insert(int index, string item) { ThrowOnLocked(); InternalInsert(index, item); } public bool InternalRemove(string item) { return _list.Remove(item); } public bool Remove(string item) { ThrowOnLocked(); return InternalRemove(item); } public void InternalRemoveAt(int index) { _list.RemoveAt(index); } public void RemoveAt(int index) { ThrowOnLocked(); InternalRemoveAt(index); } IEnumerator IEnumerable.GetEnumerator() { return ((IEnumerable)_list).GetEnumerator(); } } } }
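// A brief consumer-side sketch of the ItemsSource / ItemDisplayBinding path implemented
// above: items come from an arbitrary IList, the display text for each item is produced
// by applying ItemDisplayBinding (see GetDisplayMember), and SelectedItem round-trips the
// underlying object rather than its display string. The Fruit type and the page layout
// are illustrative only.
using System.Collections.Generic;
using Xamarin.Forms;

public class Fruit
{
    public string Name { get; set; }
    public decimal Price { get; set; }
}

public class FruitPage : ContentPage
{
    public FruitPage()
    {
        var picker = new Picker { Title = "Pick a fruit" };

        // Each item is rendered through this binding rather than ToString().
        picker.ItemDisplayBinding = new Binding(nameof(Fruit.Name));

        // Setting ItemsSource locks the Items list and repopulates it via GetDisplayMember.
        picker.ItemsSource = new List<Fruit>
        {
            new Fruit { Name = "Apple", Price = 1.2m },
            new Fruit { Name = "Pear",  Price = 1.5m },
        };

        // SelectedItem is the selected Fruit instance, not its display string.
        picker.SelectedIndexChanged += (sender, args) =>
        {
            var selected = (Fruit)picker.SelectedItem;
        };

        Content = picker;
    }
}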
using System; using System.Collections.Generic; using System.Linq; using System.Threading; using Umbraco.Core.Auditing; using Umbraco.Core.Events; using Umbraco.Core.Logging; using Umbraco.Core.Models; using Umbraco.Core.Models.Rdbms; using Umbraco.Core.Persistence; using Umbraco.Core.Persistence.Querying; using Umbraco.Core.Persistence.UnitOfWork; using Umbraco.Core.PropertyEditors; using umbraco.interfaces; using Umbraco.Core.Exceptions; namespace Umbraco.Core.Services { /// <summary> /// Represents the DataType Service, which is an easy access to operations involving <see cref="IDataTypeDefinition"/> /// </summary> public class DataTypeService : RepositoryService, IDataTypeService { public DataTypeService(IDatabaseUnitOfWorkProvider provider, RepositoryFactory repositoryFactory, ILogger logger, IEventMessagesFactory eventMessagesFactory) : base(provider, repositoryFactory, logger, eventMessagesFactory) { } #region Containers public Attempt<OperationStatus<EntityContainer, OperationStatusType>> CreateContainer(int parentId, string name, int userId = 0) { var evtMsgs = EventMessagesFactory.Get(); var uow = UowProvider.GetUnitOfWork(); using (var repo = RepositoryFactory.CreateEntityContainerRepository(uow, Constants.ObjectTypes.DataTypeContainerGuid)) { try { var container = new EntityContainer(Constants.ObjectTypes.DataTypeGuid) { Name = name, ParentId = parentId, CreatorId = userId }; if (SavingContainer.IsRaisedEventCancelled( new SaveEventArgs<EntityContainer>(container, evtMsgs), this)) { return Attempt.Fail(new OperationStatus<EntityContainer, OperationStatusType>(container, OperationStatusType.FailedCancelledByEvent, evtMsgs)); } repo.AddOrUpdate(container); uow.Commit(); SavedContainer.RaiseEvent(new SaveEventArgs<EntityContainer>(container, evtMsgs), this); //TODO: Audit trail ? 
return Attempt.Succeed(new OperationStatus<EntityContainer, OperationStatusType>(container, OperationStatusType.Success, evtMsgs)); } catch (Exception ex) { return Attempt.Fail(new OperationStatus<EntityContainer, OperationStatusType>(null, OperationStatusType.FailedExceptionThrown, evtMsgs), ex); } } } public EntityContainer GetContainer(int containerId) { var uow = UowProvider.GetUnitOfWork(); using (var repo = RepositoryFactory.CreateEntityContainerRepository(uow, Constants.ObjectTypes.DataTypeContainerGuid)) { var container = repo.Get(containerId); return container; } } public EntityContainer GetContainer(Guid containerId) { var uow = UowProvider.GetUnitOfWork(); using (var repo = RepositoryFactory.CreateEntityContainerRepository(uow, Constants.ObjectTypes.DataTypeContainerGuid)) { var container = repo.Get(containerId); return container; } } public IEnumerable<EntityContainer> GetContainers(string name, int level) { var uow = UowProvider.GetUnitOfWork(); using (var repo = RepositoryFactory.CreateEntityContainerRepository(uow, Constants.ObjectTypes.DataTypeContainerGuid)) { return repo.Get(name, level); } } public IEnumerable<EntityContainer> GetContainers(IDataTypeDefinition dataTypeDefinition) { var ancestorIds = dataTypeDefinition.Path.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries) .Select(x => { var asInt = x.TryConvertTo<int>(); if (asInt) return asInt.Result; return int.MinValue; }) .Where(x => x != int.MinValue && x != dataTypeDefinition.Id) .ToArray(); return GetContainers(ancestorIds); } public IEnumerable<EntityContainer> GetContainers(int[] containerIds) { var uow = UowProvider.GetUnitOfWork(); using (var repo = RepositoryFactory.CreateEntityContainerRepository(uow, Constants.ObjectTypes.DataTypeContainerGuid)) { return repo.GetAll(containerIds); } } public Attempt<OperationStatus> SaveContainer(EntityContainer container, int userId = 0) { var evtMsgs = EventMessagesFactory.Get(); if (container.ContainedObjectType != Constants.ObjectTypes.DataTypeGuid) { var ex = new InvalidOperationException("Not a " + Constants.ObjectTypes.DataTypeGuid + " container."); return OperationStatus.Exception(evtMsgs, ex); } if (container.HasIdentity && container.IsPropertyDirty("ParentId")) { var ex = new InvalidOperationException("Cannot save a container with a modified parent, move the container instead."); return OperationStatus.Exception(evtMsgs, ex); } if (SavingContainer.IsRaisedEventCancelled( new SaveEventArgs<EntityContainer>(container, evtMsgs), this)) { return OperationStatus.Cancelled(evtMsgs); } var uow = UowProvider.GetUnitOfWork(); using (var repo = RepositoryFactory.CreateEntityContainerRepository(uow, Constants.ObjectTypes.DataTypeContainerGuid)) { repo.AddOrUpdate(container); uow.Commit(); } SavedContainer.RaiseEvent(new SaveEventArgs<EntityContainer>(container, evtMsgs), this); //TODO: Audit trail ? 
return OperationStatus.Success(evtMsgs); } public Attempt<OperationStatus> DeleteContainer(int containerId, int userId = 0) { var evtMsgs = EventMessagesFactory.Get(); var uow = UowProvider.GetUnitOfWork(); using (var repo = RepositoryFactory.CreateEntityContainerRepository(uow, Constants.ObjectTypes.DataTypeContainerGuid)) { var container = repo.Get(containerId); if (container == null) return OperationStatus.NoOperation(evtMsgs); if (DeletingContainer.IsRaisedEventCancelled( new DeleteEventArgs<EntityContainer>(container, evtMsgs), this)) { return Attempt.Fail(new OperationStatus(OperationStatusType.FailedCancelledByEvent, evtMsgs)); } repo.Delete(container); uow.Commit(); DeletedContainer.RaiseEvent(new DeleteEventArgs<EntityContainer>(container, evtMsgs), this); return OperationStatus.Success(evtMsgs); //TODO: Audit trail ? } } #endregion /// <summary> /// Gets a <see cref="IDataTypeDefinition"/> by its Name /// </summary> /// <param name="name">Name of the <see cref="IDataTypeDefinition"/></param> /// <returns><see cref="IDataTypeDefinition"/></returns> public IDataTypeDefinition GetDataTypeDefinitionByName(string name) { using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(UowProvider.GetUnitOfWork())) { return repository.GetByQuery(new Query<IDataTypeDefinition>().Where(x => x.Name == name)).FirstOrDefault(); } } /// <summary> /// Gets a <see cref="IDataTypeDefinition"/> by its Id /// </summary> /// <param name="id">Id of the <see cref="IDataTypeDefinition"/></param> /// <returns><see cref="IDataTypeDefinition"/></returns> public IDataTypeDefinition GetDataTypeDefinitionById(int id) { using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(UowProvider.GetUnitOfWork())) { return repository.Get(id); } } /// <summary> /// Gets a <see cref="IDataTypeDefinition"/> by its unique guid Id /// </summary> /// <param name="id">Unique guid Id of the DataType</param> /// <returns><see cref="IDataTypeDefinition"/></returns> public IDataTypeDefinition GetDataTypeDefinitionById(Guid id) { using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(UowProvider.GetUnitOfWork())) { var query = Query<IDataTypeDefinition>.Builder.Where(x => x.Key == id); var definitions = repository.GetByQuery(query); return definitions.FirstOrDefault(); } } /// <summary> /// Gets a <see cref="IDataTypeDefinition"/> by its control Id /// </summary> /// <param name="id">Id of the DataType control</param> /// <returns>Collection of <see cref="IDataTypeDefinition"/> objects with a matching control id</returns> [Obsolete("Property editors are defined by a string alias from version 7 onwards, use the overload GetDataTypeDefinitionByPropertyEditorAlias instead")] public IEnumerable<IDataTypeDefinition> GetDataTypeDefinitionByControlId(Guid id) { var alias = LegacyPropertyEditorIdToAliasConverter.GetAliasFromLegacyId(id, true); return GetDataTypeDefinitionByPropertyEditorAlias(alias); } /// <summary> /// Gets a <see cref="IDataTypeDefinition"/> by its property editor alias /// </summary> /// <param name="propertyEditorAlias">Alias of the property editor</param> /// <returns>Collection of <see cref="IDataTypeDefinition"/> objects with a matching property editor alias</returns> public IEnumerable<IDataTypeDefinition> GetDataTypeDefinitionByPropertyEditorAlias(string propertyEditorAlias) { using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(UowProvider.GetUnitOfWork())) { var query = Query<IDataTypeDefinition>.Builder.Where(x => x.PropertyEditorAlias ==
propertyEditorAlias); var definitions = repository.GetByQuery(query); return definitions; } } /// <summary> /// Gets all <see cref="IDataTypeDefinition"/> objects or those with the ids passed in /// </summary> /// <param name="ids">Optional array of Ids</param> /// <returns>An enumerable list of <see cref="IDataTypeDefinition"/> objects</returns> public IEnumerable<IDataTypeDefinition> GetAllDataTypeDefinitions(params int[] ids) { using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(UowProvider.GetUnitOfWork())) { return repository.GetAll(ids); } } /// <summary> /// Gets all prevalues for an <see cref="IDataTypeDefinition"/> /// </summary> /// <param name="id">Id of the <see cref="IDataTypeDefinition"/> to retrieve prevalues from</param> /// <returns>An enumerable list of string values</returns> public IEnumerable<string> GetPreValuesByDataTypeId(int id) { using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(UowProvider.GetUnitOfWork())) { var collection = repository.GetPreValuesCollectionByDataTypeId(id); //now convert the collection to a string list var list = collection.FormatAsDictionary() .Select(x => x.Value.Value) .ToList(); return list; } } /// <summary> /// Returns the PreValueCollection for the specified data type /// </summary> /// <param name="id"></param> /// <returns></returns> public PreValueCollection GetPreValuesCollectionByDataTypeId(int id) { using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(UowProvider.GetUnitOfWork())) { return repository.GetPreValuesCollectionByDataTypeId(id); } } /// <summary> /// Gets a specific PreValue by its Id /// </summary> /// <param name="id">Id of the PreValue to retrieve the value from</param> /// <returns>PreValue as a string</returns> public string GetPreValueAsString(int id) { using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(UowProvider.GetUnitOfWork())) { return repository.GetPreValueAsString(id); } } public Attempt<OperationStatus<MoveOperationStatusType>> Move(IDataTypeDefinition toMove, int parentId) { var evtMsgs = EventMessagesFactory.Get(); if (Moving.IsRaisedEventCancelled( new MoveEventArgs<IDataTypeDefinition>(evtMsgs, new MoveEventInfo<IDataTypeDefinition>(toMove, toMove.Path, parentId)), this)) { return Attempt.Fail( new OperationStatus<MoveOperationStatusType>( MoveOperationStatusType.FailedCancelledByEvent, evtMsgs)); } var moveInfo = new List<MoveEventInfo<IDataTypeDefinition>>(); var uow = UowProvider.GetUnitOfWork(); using (var containerRepository = RepositoryFactory.CreateEntityContainerRepository(uow, Constants.ObjectTypes.DataTypeContainerGuid)) using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(uow)) { try { EntityContainer container = null; if (parentId > 0) { container = containerRepository.Get(parentId); if (container == null) throw new DataOperationException<MoveOperationStatusType>(MoveOperationStatusType.FailedParentNotFound); } moveInfo.AddRange(repository.Move(toMove, container)); } catch (DataOperationException<MoveOperationStatusType> ex) { return Attempt.Fail( new OperationStatus<MoveOperationStatusType>(ex.Operation, evtMsgs)); } uow.Commit(); } Moved.RaiseEvent(new MoveEventArgs<IDataTypeDefinition>(false, evtMsgs, moveInfo.ToArray()), this); return Attempt.Succeed( new OperationStatus<MoveOperationStatusType>(MoveOperationStatusType.Success, evtMsgs)); } /// <summary> /// Saves an <see cref="IDataTypeDefinition"/> /// </summary> /// <param name="dataTypeDefinition"><see 
cref="IDataTypeDefinition"/> to save</param> /// <param name="userId">Id of the user issueing the save</param> public void Save(IDataTypeDefinition dataTypeDefinition, int userId = 0) { if (Saving.IsRaisedEventCancelled(new SaveEventArgs<IDataTypeDefinition>(dataTypeDefinition), this)) return; if (string.IsNullOrWhiteSpace(dataTypeDefinition.Name)) { throw new ArgumentException("Cannot save datatype with empty name."); } var uow = UowProvider.GetUnitOfWork(); using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(uow)) { dataTypeDefinition.CreatorId = userId; repository.AddOrUpdate(dataTypeDefinition); uow.Commit(); Saved.RaiseEvent(new SaveEventArgs<IDataTypeDefinition>(dataTypeDefinition, false), this); } Audit(AuditType.Save, string.Format("Save DataTypeDefinition performed by user"), userId, dataTypeDefinition.Id); } /// <summary> /// Saves a collection of <see cref="IDataTypeDefinition"/> /// </summary> /// <param name="dataTypeDefinitions"><see cref="IDataTypeDefinition"/> to save</param> /// <param name="userId">Id of the user issueing the save</param> public void Save(IEnumerable<IDataTypeDefinition> dataTypeDefinitions, int userId = 0) { Save(dataTypeDefinitions, userId, true); } /// <summary> /// Saves a collection of <see cref="IDataTypeDefinition"/> /// </summary> /// <param name="dataTypeDefinitions"><see cref="IDataTypeDefinition"/> to save</param> /// <param name="userId">Id of the user issueing the save</param> /// <param name="raiseEvents">Boolean indicating whether or not to raise events</param> public void Save(IEnumerable<IDataTypeDefinition> dataTypeDefinitions, int userId, bool raiseEvents) { if (raiseEvents) { if (Saving.IsRaisedEventCancelled(new SaveEventArgs<IDataTypeDefinition>(dataTypeDefinitions), this)) return; } var uow = UowProvider.GetUnitOfWork(); using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(uow)) { foreach (var dataTypeDefinition in dataTypeDefinitions) { dataTypeDefinition.CreatorId = userId; repository.AddOrUpdate(dataTypeDefinition); } uow.Commit(); if (raiseEvents) Saved.RaiseEvent(new SaveEventArgs<IDataTypeDefinition>(dataTypeDefinitions, false), this); } Audit(AuditType.Save, string.Format("Save DataTypeDefinition performed by user"), userId, -1); } /// <summary> /// Saves a list of PreValues for a given DataTypeDefinition /// </summary> /// <param name="dataTypeId">Id of the DataTypeDefinition to save PreValues for</param> /// <param name="values">List of string values to save</param> [Obsolete("This should no longer be used, use the alternative SavePreValues or SaveDataTypeAndPreValues methods instead. This will only insert pre-values without keys")] public void SavePreValues(int dataTypeId, IEnumerable<string> values) { //TODO: Should we raise an event here since we are really saving values for the data type? 
using (var uow = UowProvider.GetUnitOfWork()) { using (var transaction = uow.Database.GetTransaction()) { var sortOrderObj = uow.Database.ExecuteScalar<object>( "SELECT max(sortorder) FROM cmsDataTypePreValues WHERE datatypeNodeId = @DataTypeId", new { DataTypeId = dataTypeId }); int sortOrder; if (sortOrderObj == null || int.TryParse(sortOrderObj.ToString(), out sortOrder) == false) { sortOrder = 1; } foreach (var value in values) { var dto = new DataTypePreValueDto { DataTypeNodeId = dataTypeId, Value = value, SortOrder = sortOrder }; uow.Database.Insert(dto); sortOrder++; } transaction.Complete(); } } } /// <summary> /// Saves/updates the pre-values /// </summary> /// <param name="dataTypeId"></param> /// <param name="values"></param> /// <remarks> /// We need to actually look up each pre-value and maintain it's id if possible - this is because of silly property editors /// like 'dropdown list publishing keys' /// </remarks> public void SavePreValues(int dataTypeId, IDictionary<string, PreValue> values) { var dtd = this.GetDataTypeDefinitionById(dataTypeId); if (dtd == null) { throw new InvalidOperationException("No data type found for id " + dataTypeId); } SavePreValues(dtd, values); } /// <summary> /// Saves/updates the pre-values /// </summary> /// <param name="dataTypeDefinition"></param> /// <param name="values"></param> /// <remarks> /// We need to actually look up each pre-value and maintain it's id if possible - this is because of silly property editors /// like 'dropdown list publishing keys' /// </remarks> public void SavePreValues(IDataTypeDefinition dataTypeDefinition, IDictionary<string, PreValue> values) { //TODO: Should we raise an event here since we are really saving values for the data type? var uow = UowProvider.GetUnitOfWork(); using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(uow)) { repository.AddOrUpdatePreValues(dataTypeDefinition, values); uow.Commit(); } } /// <summary> /// This will save a data type and it's pre-values in one transaction /// </summary> /// <param name="dataTypeDefinition"></param> /// <param name="values"></param> /// <param name="userId"></param> public void SaveDataTypeAndPreValues(IDataTypeDefinition dataTypeDefinition, IDictionary<string, PreValue> values, int userId = 0) { if (Saving.IsRaisedEventCancelled(new SaveEventArgs<IDataTypeDefinition>(dataTypeDefinition), this)) return; // if preValues contain the data type, override the data type definition accordingly if (values != null && values.ContainsKey(Constants.PropertyEditors.PreValueKeys.DataValueType)) dataTypeDefinition.DatabaseType = PropertyValueEditor.GetDatabaseType(values[Constants.PropertyEditors.PreValueKeys.DataValueType].Value); var uow = UowProvider.GetUnitOfWork(); using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(uow)) { dataTypeDefinition.CreatorId = userId; //add/update the dtd repository.AddOrUpdate(dataTypeDefinition); //add/update the prevalues repository.AddOrUpdatePreValues(dataTypeDefinition, values); uow.Commit(); Saved.RaiseEvent(new SaveEventArgs<IDataTypeDefinition>(dataTypeDefinition, false), this); } Audit(AuditType.Save, string.Format("Save DataTypeDefinition performed by user"), userId, dataTypeDefinition.Id); } /// <summary> /// Deletes an <see cref="IDataTypeDefinition"/> /// </summary> /// <remarks> /// Please note that deleting a <see cref="IDataTypeDefinition"/> will remove /// all the <see cref="PropertyType"/> data that references this <see cref="IDataTypeDefinition"/>. 
/// </remarks> /// <param name="dataTypeDefinition"><see cref="IDataTypeDefinition"/> to delete</param> /// <param name="userId">Optional Id of the user issueing the deletion</param> public void Delete(IDataTypeDefinition dataTypeDefinition, int userId = 0) { if (Deleting.IsRaisedEventCancelled(new DeleteEventArgs<IDataTypeDefinition>(dataTypeDefinition), this)) return; var uow = UowProvider.GetUnitOfWork(); using (var repository = RepositoryFactory.CreateDataTypeDefinitionRepository(uow)) { repository.Delete(dataTypeDefinition); uow.Commit(); Deleted.RaiseEvent(new DeleteEventArgs<IDataTypeDefinition>(dataTypeDefinition, false), this); } Audit(AuditType.Delete, string.Format("Delete DataTypeDefinition performed by user"), userId, dataTypeDefinition.Id); } /// <summary> /// Gets the <see cref="IDataType"/> specified by it's unique ID /// </summary> /// <param name="id">Id of the DataType, which corresponds to the Guid Id of the control</param> /// <returns><see cref="IDataType"/> object</returns> [Obsolete("IDataType is obsolete and is no longer used, it will be removed from the codebase in future versions")] public IDataType GetDataTypeById(Guid id) { return DataTypesResolver.Current.GetById(id); } /// <summary> /// Gets a complete list of all registered <see cref="IDataType"/>'s /// </summary> /// <returns>An enumerable list of <see cref="IDataType"/> objects</returns> [Obsolete("IDataType is obsolete and is no longer used, it will be removed from the codebase in future versions")] public IEnumerable<IDataType> GetAllDataTypes() { return DataTypesResolver.Current.DataTypes; } private void Audit(AuditType type, string message, int userId, int objectId) { var uow = UowProvider.GetUnitOfWork(); using (var auditRepo = RepositoryFactory.CreateAuditRepository(uow)) { auditRepo.AddOrUpdate(new AuditItem(objectId, message, type, userId)); uow.Commit(); } } #region Event Handlers public static event TypedEventHandler<IDataTypeService, SaveEventArgs<EntityContainer>> SavingContainer; public static event TypedEventHandler<IDataTypeService, SaveEventArgs<EntityContainer>> SavedContainer; public static event TypedEventHandler<IDataTypeService, DeleteEventArgs<EntityContainer>> DeletingContainer; public static event TypedEventHandler<IDataTypeService, DeleteEventArgs<EntityContainer>> DeletedContainer; /// <summary> /// Occurs before Delete /// </summary> public static event TypedEventHandler<IDataTypeService, DeleteEventArgs<IDataTypeDefinition>> Deleting; /// <summary> /// Occurs after Delete /// </summary> public static event TypedEventHandler<IDataTypeService, DeleteEventArgs<IDataTypeDefinition>> Deleted; /// <summary> /// Occurs before Save /// </summary> public static event TypedEventHandler<IDataTypeService, SaveEventArgs<IDataTypeDefinition>> Saving; /// <summary> /// Occurs after Save /// </summary> public static event TypedEventHandler<IDataTypeService, SaveEventArgs<IDataTypeDefinition>> Saved; /// <summary> /// Occurs before Move /// </summary> public static event TypedEventHandler<IDataTypeService, MoveEventArgs<IDataTypeDefinition>> Moving; /// <summary> /// Occurs after Move /// </summary> public static event TypedEventHandler<IDataTypeService, MoveEventArgs<IDataTypeDefinition>> Moved; #endregion } }
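// A short consumer-side sketch of the service above: look a definition up by name,
// adjust it, and persist the definition together with its pre-values in one call
// (which commits once and raises Saving/Saved). How the IDataTypeService instance is
// obtained (dependency injection, ApplicationContext, a test double) is deliberately
// left out; the "Example Dropdown" alias, the pre-value keys, and PreValue's string
// constructor are assumptions made for illustration.
using System.Collections.Generic;
using Umbraco.Core.Models;
using Umbraco.Core.Services;

public static class DataTypeServiceUsageSketch
{
    public static void RenameAndConfigure(IDataTypeService dataTypeService)
    {
        // Single repository query by name (see GetDataTypeDefinitionByName above).
        IDataTypeDefinition definition = dataTypeService.GetDataTypeDefinitionByName("Example Dropdown");
        if (definition == null)
            return;

        definition.Name = "Example Dropdown (renamed)";

        // Keyed pre-values; existing ids are preserved where possible (see the
        // remarks on SavePreValues above).
        var preValues = new Dictionary<string, PreValue>
        {
            { "item1", new PreValue("First option") },
            { "item2", new PreValue("Second option") },
        };

        // Saves the definition and its pre-values together and audits the save.
        dataTypeService.SaveDataTypeAndPreValues(definition, preValues, userId: 0);
    }
}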
/* **************************************************************************** * * Copyright (c) Microsoft Corporation. * * This source code is subject to terms and conditions of the Apache License, Version 2.0. A * copy of the license can be found in the License.html file at the root of this distribution. If * you cannot locate the Apache License, Version 2.0, please send an email to * [email protected]. By using this source code in any fashion, you are agreeing to be bound * by the terms of the Apache License, Version 2.0. * * You must not remove this notice, or any other, from this software. * * ***************************************************************************/ using System; using System.Collections.Generic; using SRC = System.Runtime.CompilerServices; namespace Microsoft.PythonTools.Analysis { static class IdDispenser { // The one and only comparer instance. private static readonly IEqualityComparer<object> _comparer = new WrapperComparer(); private static Dictionary<object, object> _hashtable = new Dictionary<object, object>(_comparer); private static readonly Object _synchObject = new Object(); // The one and only global lock instance. // We do not need to worry about duplicates thanks to using long for the unique Id. // It takes more than 100 years to overflow long on year 2005 hardware. private static long _currentId = 0; // Last unique Id we have given out. // cleanupId and cleanupGC are used for efficient scheduling of hashtable cleanups private static long _cleanupId; // currentId at the time of last cleanup private static int _cleanupGC; // GC.CollectionCount(0) at the time of last cleanup public static void Clear() { lock (_synchObject) { _hashtable.Clear(); _currentId = 0; } } /// <summary> /// Given an ID returns the object associated with that ID. /// </summary> public static object GetObject(long id) { lock (_synchObject) { foreach (Wrapper w in _hashtable.Keys) { if (w.Target != null) { if (w.Id == id) return w.Target; } } return null; } } /// <summary> /// Gets a unique ID for an object if it has been assigned one. /// </summary> public static bool TryGetId(Object o, out long id) { if (o == null) { id = 0; return true; } lock (_synchObject) { // If the object exists then return its existing ID. object res; if (_hashtable.TryGetValue(o, out res)) { id = ((Wrapper)res).Id; return true; } } id = 0; return false; } /// <summary> /// Gets a unique ID for an object /// </summary> public static long GetId(Object o) { if (o == null) return 0; lock (_synchObject) { // If the object exists then return its existing ID. object res; if (_hashtable.TryGetValue(o, out res)) { return ((Wrapper)res).Id; } long uniqueId = checked(++_currentId); long change = uniqueId - _cleanupId; // Clean up the table if it has been a while since the last cleanup. // Take the size of the table into account. if (change > 1234 + _hashtable.Count / 2) { // It makes sense to do the cleanup only if a GC has happened in the meantime. // WeakReference targets can become null only during a GC.
int currentGC = GC.CollectionCount(0); if (currentGC != _cleanupGC) { Cleanup(); _cleanupId = uniqueId; _cleanupGC = currentGC; } else { _cleanupId += 1234; } } Wrapper w = new Wrapper(o, uniqueId); _hashtable[w] = w; return uniqueId; } } /// <summary> /// Goes over the hashtable and removes empty entries /// </summary> private static void Cleanup() { int liveCount = 0; int emptyCount = 0; foreach (Wrapper w in _hashtable.Keys) { if (w.Target != null) liveCount++; else emptyCount++; } // Rehash the table if there is a significant number of empty slots if (emptyCount > liveCount / 4) { Dictionary<object, object> newtable = new Dictionary<object, object>(liveCount + liveCount / 4, _comparer); foreach (Wrapper w in _hashtable.Keys) { if (w.Target != null) newtable[w] = w; } _hashtable = newtable; } } /// <summary> /// Weak-ref wrapper caches the weak reference, our hash code, and the object ID. /// </summary> private sealed class Wrapper { private WeakReference _weakReference; private int _hashCode; private long _id; public Wrapper(Object obj, long uniqueId) { _weakReference = new WeakReference(obj, true); _hashCode = (obj == null) ? 0 : SRC.RuntimeHelpers.GetHashCode(obj); _id = uniqueId; } public long Id { get { return _id; } } public Object Target { get { return _weakReference.Target; } } public override int GetHashCode() { return _hashCode; } } /// <summary> /// WrapperComparer treats Wrapper as transparent envelope /// </summary> private sealed class WrapperComparer : IEqualityComparer<object> { bool IEqualityComparer<object>.Equals(Object x, Object y) { Wrapper wx = x as Wrapper; if (wx != null) x = wx.Target; Wrapper wy = y as Wrapper; if (wy != null) y = wy.Target; return Object.ReferenceEquals(x, y); } int IEqualityComparer<object>.GetHashCode(Object obj) { Wrapper wobj = obj as Wrapper; if (wobj != null) return wobj.GetHashCode(); return GetHashCodeWorker(obj); } private static int GetHashCodeWorker(object o) { if (o == null) return 0; return SRC.RuntimeHelpers.GetHashCode(o); } } } }
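// A minimal usage sketch of the dispenser above: ids are handed out per object identity
// (the comparer unwraps to ReferenceEquals), null maps to id 0, and GetObject resolves an
// id back to the original instance while it is still alive. Because IdDispenser has no
// access modifier it is internal, so this sketch assumes it runs inside the same assembly
// (or one granted InternalsVisibleTo access).
using System;
using Microsoft.PythonTools.Analysis;

static class IdDispenserSketch
{
    static void Main()
    {
        var a = "alpha";
        var b = new string("alpha".ToCharArray()); // equal by value, distinct reference

        long idA = IdDispenser.GetId(a);
        long idB = IdDispenser.GetId(b);
        Console.WriteLine(idA == idB);                  // False: identity, not equality
        Console.WriteLine(IdDispenser.GetId(a) == idA); // True: id is stable per instance

        long found;
        Console.WriteLine(IdDispenser.TryGetId(a, out found) && found == idA); // True
        Console.WriteLine(ReferenceEquals(IdDispenser.GetObject(idA), a));     // True

        Console.WriteLine(IdDispenser.GetId(null)); // 0: null always maps to id 0
    }
}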
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.ComponentModel.Composition; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Notification; using Microsoft.CodeAnalysis.Shared.TestHooks; using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem; using Microsoft.VisualStudio.LanguageServices.Implementation.Venus; using Microsoft.VisualStudio.Shell; using Roslyn.Utilities; namespace Microsoft.VisualStudio.LanguageServices.Implementation.TaskList { [Export(typeof(ExternalErrorDiagnosticUpdateSource))] internal class ExternalErrorDiagnosticUpdateSource : IDiagnosticUpdateSource { private readonly Workspace _workspace; private readonly IDiagnosticAnalyzerService _diagnosticService; private readonly IGlobalOperationNotificationService _notificationService; private readonly SimpleTaskQueue _taskQueue; private readonly IAsynchronousOperationListener _listener; private InprogressState _state = null; private ImmutableArray<DiagnosticData> _lastBuiltResult = ImmutableArray<DiagnosticData>.Empty; [ImportingConstructor] public ExternalErrorDiagnosticUpdateSource( VisualStudioWorkspaceImpl workspace, IDiagnosticAnalyzerService diagnosticService, IDiagnosticUpdateSourceRegistrationService registrationService, [ImportMany] IEnumerable<Lazy<IAsynchronousOperationListener, FeatureMetadata>> asyncListeners) : this(workspace, diagnosticService, registrationService, new AggregateAsynchronousOperationListener(asyncListeners, FeatureAttribute.ErrorList)) { Contract.Requires(!KnownUIContexts.SolutionBuildingContext.IsActive); KnownUIContexts.SolutionBuildingContext.UIContextChanged += OnSolutionBuild; } /// <summary> /// internal for testing /// </summary> internal ExternalErrorDiagnosticUpdateSource( Workspace workspace, IDiagnosticAnalyzerService diagnosticService, IDiagnosticUpdateSourceRegistrationService registrationService, IAsynchronousOperationListener listener) { // use queue to serialize work. no lock needed _taskQueue = new SimpleTaskQueue(TaskScheduler.Default); _listener = listener; _workspace = workspace; _workspace.WorkspaceChanged += OnWorkspaceChanged; _diagnosticService = diagnosticService; _notificationService = _workspace.Services.GetService<IGlobalOperationNotificationService>(); registrationService.Register(this); } public event EventHandler<bool> BuildStarted; public event EventHandler<DiagnosticsUpdatedArgs> DiagnosticsUpdated; public bool IsInProgress => _state != null; public ImmutableArray<DiagnosticData> GetBuildErrors() { return _lastBuiltResult; } public bool SupportedDiagnosticId(ProjectId projectId, string id) { return _state?.SupportedDiagnosticId(projectId, id) ?? false; } public void ClearErrors(ProjectId projectId) { // capture state if it exists var state = _state; var asyncToken = _listener.BeginAsyncOperation("ClearErrors"); _taskQueue.ScheduleTask(() => { // record the project as built only if we are in build. // otherwise (such as closing solution or removing project), no need to record it state?.Built(projectId); ClearProjectErrors(state?.Solution ?? 
_workspace.CurrentSolution, projectId); }).CompletesAsyncOperation(asyncToken); } private void OnWorkspaceChanged(object sender, WorkspaceChangeEventArgs e) { switch (e.Kind) { case WorkspaceChangeKind.SolutionAdded: case WorkspaceChangeKind.SolutionRemoved: case WorkspaceChangeKind.SolutionCleared: case WorkspaceChangeKind.SolutionReloaded: { var asyncToken = _listener.BeginAsyncOperation("OnSolutionChanged"); _taskQueue.ScheduleTask(() => e.OldSolution.ProjectIds.Do(p => ClearProjectErrors(e.OldSolution, p))).CompletesAsyncOperation(asyncToken); break; } case WorkspaceChangeKind.ProjectRemoved: case WorkspaceChangeKind.ProjectReloaded: { var asyncToken = _listener.BeginAsyncOperation("OnProjectChanged"); _taskQueue.ScheduleTask(() => ClearProjectErrors(e.OldSolution, e.ProjectId)).CompletesAsyncOperation(asyncToken); break; } case WorkspaceChangeKind.DocumentRemoved: case WorkspaceChangeKind.DocumentReloaded: { var asyncToken = _listener.BeginAsyncOperation("OnDocumentRemoved"); _taskQueue.ScheduleTask(() => ClearDocumentErrors(e.OldSolution, e.ProjectId, e.DocumentId)).CompletesAsyncOperation(asyncToken); break; } case WorkspaceChangeKind.ProjectAdded: case WorkspaceChangeKind.DocumentAdded: case WorkspaceChangeKind.DocumentChanged: case WorkspaceChangeKind.ProjectChanged: case WorkspaceChangeKind.SolutionChanged: case WorkspaceChangeKind.AdditionalDocumentAdded: case WorkspaceChangeKind.AdditionalDocumentRemoved: case WorkspaceChangeKind.AdditionalDocumentReloaded: case WorkspaceChangeKind.AdditionalDocumentChanged: break; default: Contract.Fail("Unknown workspace events"); break; } } internal void OnSolutionBuild(object sender, UIContextChangedEventArgs e) { if (e.Activated) { // build just started, create the state and fire build in progress event. var state = GetOrCreateInprogressState(); return; } // get local copy of inprogress state var inprogressState = _state; // building is done. reset the state. Interlocked.CompareExchange(ref _state, null, inprogressState); // enqueue build/live sync in the queue. var asyncToken = _listener.BeginAsyncOperation("OnSolutionBuild"); _taskQueue.ScheduleTask(async () => { // nothing to do if (inprogressState == null) { return; } _lastBuiltResult = inprogressState.GetBuildDiagnostics(); // we are about to update live analyzer data using one from build. // pause live analyzer using (var operation = _notificationService.Start("BuildDone")) { Func<DiagnosticData, bool> liveDiagnosticChecker = d => { // REVIEW: we probably need a better design on de-duplicating live and build errors. or don't de-dup at all. // for now, we are special casing compiler error case. var project = inprogressState.Solution.GetProject(d.ProjectId); if (project == null) { // project doesn't exist return false; } // REVIEW: current design is that we special case compiler analyzer case and we accept only document level // diagnostic as live. otherwise, we let them be build errors. we changed compiler analyzer accordingly as well // so that it doesn't report project level diagnostic as live errors. 
if (_diagnosticService.IsCompilerDiagnostic(project.Language, d) && d.DocumentId == null) { // compiler error but project level error return false; } if (inprogressState.SupportedDiagnosticId(d.ProjectId, d.Id)) { return true; } return false; }; var diagnosticService = _diagnosticService as DiagnosticAnalyzerService; if (diagnosticService != null) { await CleanupAllLiveErrorsIfNeededAsync(diagnosticService, inprogressState.Solution, inprogressState).ConfigureAwait(false); await SyncBuildErrorsAndReportAsync(diagnosticService, inprogressState.Solution, inprogressState.GetLiveDiagnosticsPerProject(liveDiagnosticChecker)).ConfigureAwait(false); } inprogressState.Done(); } }).CompletesAsyncOperation(asyncToken); } private async System.Threading.Tasks.Task CleanupAllLiveErrorsIfNeededAsync(DiagnosticAnalyzerService diagnosticService, Solution solution, InprogressState state) { if (_workspace.Options.GetOption(InternalDiagnosticsOptions.BuildErrorIsTheGod)) { await CleanupAllLiveErrors(diagnosticService, solution.ProjectIds).ConfigureAwait(false); return; } if (_workspace.Options.GetOption(InternalDiagnosticsOptions.ClearLiveErrorsForProjectBuilt)) { await CleanupAllLiveErrors(diagnosticService, state.GetProjectsBuilt(solution)).ConfigureAwait(false); return; } await CleanupAllLiveErrors(diagnosticService, state.GetProjectsWithoutErrors(solution)).ConfigureAwait(false); return; } private System.Threading.Tasks.Task CleanupAllLiveErrors(DiagnosticAnalyzerService diagnosticService, IEnumerable<ProjectId> projects) { var map = projects.ToImmutableDictionary(p => p, _ => ImmutableArray<DiagnosticData>.Empty); return diagnosticService.SynchronizeWithBuildAsync(_workspace, map); } private async System.Threading.Tasks.Task SyncBuildErrorsAndReportAsync( DiagnosticAnalyzerService diagnosticService, Solution solution, ImmutableDictionary<ProjectId, ImmutableArray<DiagnosticData>> map) { // make those errors live errors await diagnosticService.SynchronizeWithBuildAsync(_workspace, map).ConfigureAwait(false); // raise events for ones left-out var buildErrors = GetBuildErrors().Except(map.Values.SelectMany(v => v)).GroupBy(k => k.DocumentId); foreach (var group in buildErrors) { if (group.Key == null) { foreach (var projectGroup in group.GroupBy(g => g.ProjectId)) { Contract.ThrowIfNull(projectGroup.Key); ReportBuildErrors(projectGroup.Key, solution, projectGroup.ToImmutableArray()); } continue; } ReportBuildErrors(group.Key, solution, group.ToImmutableArray()); } } private void ReportBuildErrors<T>(T item, Solution solution, ImmutableArray<DiagnosticData> buildErrors) { var projectId = item as ProjectId; if (projectId != null) { RaiseDiagnosticsCreated(projectId, solution, projectId, null, buildErrors); return; } // must be not null var documentId = item as DocumentId; RaiseDiagnosticsCreated(documentId, solution, documentId.ProjectId, documentId, buildErrors); } private void ClearProjectErrors(Solution solution, ProjectId projectId) { // remove all project errors RaiseDiagnosticsRemoved(projectId, solution, projectId, documentId: null); var project = solution.GetProject(projectId); if (project == null) { return; } // remove all document errors foreach (var documentId in project.DocumentIds) { ClearDocumentErrors(solution, projectId, documentId); } } private void ClearDocumentErrors(Solution solution, ProjectId projectId, DocumentId documentId) { RaiseDiagnosticsRemoved(documentId, solution, projectId, documentId); } public void AddNewErrors(ProjectId projectId, DiagnosticData diagnostic) { // capture 
state that will be processed in background thread. var state = GetOrCreateInprogressState(); var asyncToken = _listener.BeginAsyncOperation("Project New Errors"); _taskQueue.ScheduleTask(() => { state.AddError(projectId, diagnostic); }).CompletesAsyncOperation(asyncToken); } public void AddNewErrors(DocumentId documentId, DiagnosticData diagnostic) { // capture state that will be processed in background thread. var state = GetOrCreateInprogressState(); var asyncToken = _listener.BeginAsyncOperation("Document New Errors"); _taskQueue.ScheduleTask(() => { state.AddError(documentId, diagnostic); }).CompletesAsyncOperation(asyncToken); } public void AddNewErrors( ProjectId projectId, HashSet<DiagnosticData> projectErrors, Dictionary<DocumentId, HashSet<DiagnosticData>> documentErrorMap) { // capture state that will be processed in background thread var state = GetOrCreateInprogressState(); var asyncToken = _listener.BeginAsyncOperation("Project New Errors"); _taskQueue.ScheduleTask(() => { foreach (var kv in documentErrorMap) { state.AddErrors(kv.Key, kv.Value); } state.AddErrors(projectId, projectErrors); }).CompletesAsyncOperation(asyncToken); } private InprogressState GetOrCreateInprogressState() { if (_state == null) { // here, we take current snapshot of solution when the state is first created. and through out this code, we use this snapshot. // since we have no idea what actual snapshot of solution the out of proc build has picked up, it doesn't remove the race we can have // between build and diagnostic service, but this at least make us to consistent inside of our code. Interlocked.CompareExchange(ref _state, new InprogressState(this, _workspace.CurrentSolution), null); } return _state; } private void RaiseDiagnosticsCreated(object id, Solution solution, ProjectId projectId, DocumentId documentId, ImmutableArray<DiagnosticData> items) { DiagnosticsUpdated?.Invoke(this, DiagnosticsUpdatedArgs.DiagnosticsCreated( CreateArgumentKey(id), _workspace, solution, projectId, documentId, items)); } private void RaiseDiagnosticsRemoved(object id, Solution solution, ProjectId projectId, DocumentId documentId) { DiagnosticsUpdated?.Invoke(this, DiagnosticsUpdatedArgs.DiagnosticsRemoved( CreateArgumentKey(id), _workspace, solution, projectId, documentId)); } private static ArgumentKey CreateArgumentKey(object id) => new ArgumentKey(id); private void RaiseBuildStarted(bool started) { BuildStarted?.Invoke(this, started); } #region not supported public bool SupportGetDiagnostics { get { return false; } } public ImmutableArray<DiagnosticData> GetDiagnostics( Workspace workspace, ProjectId projectId, DocumentId documentId, object id, bool includeSuppressedDiagnostics = false, CancellationToken cancellationToken = default(CancellationToken)) { return ImmutableArray<DiagnosticData>.Empty; } #endregion private class InprogressState { private readonly ExternalErrorDiagnosticUpdateSource _owner; private readonly Solution _solution; private readonly HashSet<ProjectId> _builtProjects = new HashSet<ProjectId>(); private readonly Dictionary<ProjectId, HashSet<DiagnosticData>> _projectMap = new Dictionary<ProjectId, HashSet<DiagnosticData>>(); private readonly Dictionary<DocumentId, HashSet<DiagnosticData>> _documentMap = new Dictionary<DocumentId, HashSet<DiagnosticData>>(); private readonly Dictionary<ProjectId, HashSet<string>> _diagnosticIdMap = new Dictionary<ProjectId, HashSet<string>>(); public InprogressState(ExternalErrorDiagnosticUpdateSource owner, Solution solution) { _owner = owner; _solution = 
solution; // let people know build has started // TODO: to be more accurate, it probably needs to be counted. but for now, // I think the way it is doing probably enough. _owner.RaiseBuildStarted(started: true); } public Solution Solution => _solution; public void Done() { _owner.RaiseBuildStarted(started: false); } public bool SupportedDiagnosticId(ProjectId projectId, string id) { HashSet<string> ids; if (_diagnosticIdMap.TryGetValue(projectId, out ids)) { return ids.Contains(id); } // set ids set var map = new HashSet<string>(); _diagnosticIdMap.Add(projectId, map); var project = _solution.GetProject(projectId); if (project == null) { // projectId no longer exist, return false; return false; } var descriptorMap = _owner._diagnosticService.GetDiagnosticDescriptors(project); map.UnionWith(descriptorMap.Values.SelectMany(v => v.Select(d => d.Id))); return map.Contains(id); } public ImmutableArray<DiagnosticData> GetBuildDiagnostics() { return ImmutableArray.CreateRange(_projectMap.Values.SelectMany(d => d).Concat(_documentMap.Values.SelectMany(d => d))); } public void Built(ProjectId projectId) { _builtProjects.Add(projectId); } public IEnumerable<ProjectId> GetProjectsBuilt(Solution solution) { return solution.ProjectIds.Where(p => _builtProjects.Contains(p)); } public IEnumerable<ProjectId> GetProjectsWithErrors(Solution solution) { return GetProjectIds().Where(p => solution.GetProject(p) != null); } public IEnumerable<ProjectId> GetProjectsWithoutErrors(Solution solution) { return GetProjectsBuilt(solution).Except(GetProjectsWithErrors(solution)); } public ImmutableDictionary<ProjectId, ImmutableArray<DiagnosticData>> GetLiveDiagnosticsPerProject(Func<DiagnosticData, bool> liveDiagnosticChecker) { var builder = ImmutableDictionary.CreateBuilder<ProjectId, ImmutableArray<DiagnosticData>>(); foreach (var projectId in GetProjectIds()) { var diagnostics = ImmutableArray.CreateRange( _projectMap.Where(kv => kv.Key == projectId).SelectMany(kv => kv.Value).Concat( _documentMap.Where(kv => kv.Key.ProjectId == projectId).SelectMany(kv => kv.Value)).Where(liveDiagnosticChecker)); builder.Add(projectId, diagnostics); } return builder.ToImmutable(); } public void AddErrors(DocumentId key, HashSet<DiagnosticData> diagnostics) { AddErrors(_documentMap, key, diagnostics); } public void AddErrors(ProjectId key, HashSet<DiagnosticData> diagnostics) { AddErrors(_projectMap, key, diagnostics); } public void AddError(DocumentId key, DiagnosticData diagnostic) { AddError(_documentMap, key, diagnostic); } public void AddError(ProjectId key, DiagnosticData diagnostic) { AddError(_projectMap, key, diagnostic); } private void AddErrors<T>(Dictionary<T, HashSet<DiagnosticData>> map, T key, HashSet<DiagnosticData> diagnostics) { var errors = GetErrorSet(map, key); errors.UnionWith(diagnostics); } private void AddError<T>(Dictionary<T, HashSet<DiagnosticData>> map, T key, DiagnosticData diagnostic) { var errors = GetErrorSet(map, key); errors.Add(diagnostic); } private IEnumerable<ProjectId> GetProjectIds() { return _documentMap.Keys.Select(k => k.ProjectId).Concat(_projectMap.Keys).Distinct(); } private HashSet<DiagnosticData> GetErrorSet<T>(Dictionary<T, HashSet<DiagnosticData>> map, T key) { return map.GetOrAdd(key, _ => new HashSet<DiagnosticData>(DiagnosticDataComparer.Instance)); } } private class ArgumentKey : BuildToolId.Base<object> { public ArgumentKey(object key) : base(key) { } public override string BuildTool { get { return PredefinedBuildTools.Build; } } public override bool Equals(object obj) { var 
other = obj as ArgumentKey; if (other == null) { return false; } return base.Equals(obj); } public override int GetHashCode() { return base.GetHashCode(); } } private class DiagnosticDataComparer : IEqualityComparer<DiagnosticData> { public static readonly DiagnosticDataComparer Instance = new DiagnosticDataComparer(); public bool Equals(DiagnosticData item1, DiagnosticData item2) { // crash if any one of them is NULL if ((IsNull(item1.DocumentId) ^ IsNull(item2.DocumentId)) || (IsNull(item1.ProjectId) ^ IsNull(item2.ProjectId))) { return false; } if (item1.DocumentId != null && item2.DocumentId != null) { var lineColumn1 = GetOriginalOrMappedLineColumn(item1); var lineColumn2 = GetOriginalOrMappedLineColumn(item2); return item1.Id == item2.Id && item1.Message == item2.Message && item1.ProjectId == item2.ProjectId && item1.DocumentId == item2.DocumentId && lineColumn1.Item1 == lineColumn2.Item1 && lineColumn1.Item2 == lineColumn2.Item2 && item1.Severity == item2.Severity; } return item1.Id == item2.Id && item1.Message == item2.Message && item1.ProjectId == item2.ProjectId && item1.Severity == item2.Severity; } public int GetHashCode(DiagnosticData obj) { if (obj.DocumentId != null) { var lineColumn = GetOriginalOrMappedLineColumn(obj); return Hash.Combine(obj.Id, Hash.Combine(obj.Message, Hash.Combine(obj.ProjectId, Hash.Combine(obj.DocumentId, Hash.Combine(lineColumn.Item1, Hash.Combine(lineColumn.Item2, (int)obj.Severity)))))); } return Hash.Combine(obj.Id, Hash.Combine(obj.Message, Hash.Combine(obj.ProjectId, (int)obj.Severity))); } private static ValueTuple<int, int> GetOriginalOrMappedLineColumn(DiagnosticData data) { var workspace = data.Workspace as VisualStudioWorkspaceImpl; if (workspace == null) { return ValueTuple.Create(data.DataLocation?.MappedStartLine ?? 0, data.DataLocation?.MappedStartColumn ?? 0); } var containedDocument = workspace.GetHostDocument(data.DocumentId) as ContainedDocument; if (containedDocument == null) { return ValueTuple.Create(data.DataLocation?.MappedStartLine ?? 0, data.DataLocation?.MappedStartColumn ?? 0); } return ValueTuple.Create(data.DataLocation?.OriginalStartLine ?? 0, data.DataLocation?.OriginalStartColumn ?? 0); } private bool IsNull<T>(T item) where T : class { return item == null; } } } }
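// GetOrCreateInprogressState and OnSolutionBuild above rely on a common lock-free
// pattern: publish a freshly created state with Interlocked.CompareExchange and let any
// thread that loses the race simply read the winner's instance; clearing swaps the field
// back to null only if it still holds the captured instance. A standalone sketch of that
// pattern follows (BuildSession and BuildTracker are illustrative, not Roslyn types).
using System.Threading;

public sealed class BuildSession
{
    // Per-build error maps would live here in the real InprogressState.
}

public sealed class BuildTracker
{
    private BuildSession _current;

    public BuildSession GetOrCreateSession()
    {
        if (_current == null)
        {
            // Only one thread wins the exchange; losers observe the winner's session.
            Interlocked.CompareExchange(ref _current, new BuildSession(), null);
        }

        return _current;
    }

    // Mirrors OnSolutionBuild: reset to null only if the field still holds the session
    // we captured, so a session created by a newer build is not discarded.
    public BuildSession CompleteSession()
    {
        var session = _current;
        Interlocked.CompareExchange(ref _current, null, session);
        return session;
    }
}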
using System; using System.Collections.Generic; using System.Text; using System.IO; using System.Windows.Documents.Serialization; using System.Windows.Documents; using System.Windows.Markup; using System.Windows.Media; using System.Printing; using System.Windows; using System.Xml; namespace DocumentSerialization { class HtmlSerializerWriter : SerializerWriter { public HtmlSerializerWriter(Stream stream) { _stream = stream; } /// <summary> /// Write a single Visual and close package /// </summary> public override void Write(Visual visual) { Write(visual, null); } /// <summary> /// Write a single Visual and close package /// </summary> public override void Write(Visual visual, PrintTicket printTicket) { SerializeObjectTree(visual); } /// <summary> /// Asynchronous Write a single Visual and close package /// </summary> public override void WriteAsync(Visual visual) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single Visual and close package /// </summary> public override void WriteAsync(Visual visual, object userState) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single Visual and close package /// </summary> public override void WriteAsync(Visual visual, PrintTicket printTicket) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single Visual and close package /// </summary> public override void WriteAsync(Visual visual, PrintTicket printTicket, object userState) { throw new NotSupportedException(); } /// <summary> /// Write a single DocumentPaginator and close package /// </summary> public override void Write(DocumentPaginator documentPaginator) { Write(documentPaginator, null); } /// <summary> /// Write a single DocumentPaginator and close package /// </summary> public override void Write(DocumentPaginator documentPaginator, PrintTicket printTicket) { SerializeObjectTree(documentPaginator.Source); } /// <summary> /// Asynchronous Write a single DocumentPaginator and close package /// </summary> public override void WriteAsync(DocumentPaginator documentPaginator) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single DocumentPaginator and close package /// </summary> public override void WriteAsync(DocumentPaginator documentPaginator, PrintTicket printTicket) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single DocumentPaginator and close package /// </summary> public override void WriteAsync(DocumentPaginator documentPaginator, object userState) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single DocumentPaginator and close package /// </summary> public override void WriteAsync(DocumentPaginator documentPaginator, PrintTicket printTicket, object userState) { throw new NotSupportedException(); } /// <summary> /// Write a single FixedPage and close package /// </summary> public override void Write(FixedPage fixedPage) { Write(fixedPage, null); } /// <summary> /// Write a single FixedPage and close package /// </summary> public override void Write(FixedPage fixedPage, PrintTicket printTicket) { SerializeObjectTree(fixedPage); } /// <summary> /// Asynchronous Write a single FixedPage and close package /// </summary> public override void WriteAsync(FixedPage fixedPage) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single FixedPage and close package /// </summary> public override void WriteAsync(FixedPage fixedPage, PrintTicket printTicket) { throw new NotSupportedException(); } 
/// <summary> /// Asynchronous Write a single FixedPage and close package /// </summary> public override void WriteAsync(FixedPage fixedPage, object userState) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single FixedPage and close package /// </summary> public override void WriteAsync(FixedPage fixedPage, PrintTicket printTicket, object userState) { throw new NotSupportedException(); } /// <summary> /// Write a single FixedDocument and close package /// </summary> public override void Write(FixedDocument fixedDocument) { Write(fixedDocument, null); } /// <summary> /// Write a single FixedDocument and close package /// </summary> public override void Write(FixedDocument fixedDocument, PrintTicket printTicket) { SerializeObjectTree(fixedDocument); } /// <summary> /// Asynchronous Write a single FixedDocument and close package /// </summary> public override void WriteAsync(FixedDocument fixedDocument) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single FixedDocument and close package /// </summary> public override void WriteAsync(FixedDocument fixedDocument, PrintTicket printTicket) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single FixedDocument and close package /// </summary> public override void WriteAsync(FixedDocument fixedDocument, object userState) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single FixedDocument and close package /// </summary> public override void WriteAsync(FixedDocument fixedDocument, PrintTicket printTicket, object userState) { throw new NotSupportedException(); } /// <summary> /// Write a single FixedDocumentSequence and close package /// </summary> public override void Write(FixedDocumentSequence fixedDocumentSequence) { Write(fixedDocumentSequence, null); } /// <summary> /// Write a single FixedDocumentSequence and close package /// </summary> public override void Write(FixedDocumentSequence fixedDocumentSequence, PrintTicket printTicket) { SerializeObjectTree(fixedDocumentSequence); } /// <summary> /// Asynchronous Write a single FixedDocumentSequence and close package /// </summary> public override void WriteAsync(FixedDocumentSequence fixedDocumentSequence) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single FixedDocumentSequence and close package /// </summary> public override void WriteAsync(FixedDocumentSequence fixedDocumentSequence, PrintTicket printTicket) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single FixedDocumentSequence and close package /// </summary> public override void WriteAsync(FixedDocumentSequence fixedDocumentSequence, object userState) { throw new NotSupportedException(); } /// <summary> /// Asynchronous Write a single FixedDocumentSequence and close package /// </summary> public override void WriteAsync(FixedDocumentSequence fixedDocumentSequence, PrintTicket printTicket, object userState) { throw new NotSupportedException(); } /// <summary> /// Cancel Asynchronous Write /// </summary> public override void CancelAsync() { throw new NotSupportedException(); } /// <summary> /// Create a SerializerWriterCollator to gobble up multiple Visuals /// </summary> public override SerializerWriterCollator CreateVisualsCollator() { throw new NotSupportedException(); } /// <summary> /// Create a SerializerWriterCollator to gobble up multiple Visuals /// </summary> public override SerializerWriterCollator CreateVisualsCollator(PrintTicket 
documentSequencePT, PrintTicket documentPT) { throw new NotSupportedException(); } #pragma warning disable 0067 /// <summary> /// This event will be invoked if the writer wants a PrintTicker /// </summary> public override event WritingPrintTicketRequiredEventHandler WritingPrintTicketRequired; /// <summary> /// This event will be invoked if the writer progress changes /// </summary> public override event WritingProgressChangedEventHandler WritingProgressChanged; /// <summary> /// This event will be invoked if the writer is done /// </summary> public override event WritingCompletedEventHandler WritingCompleted; /// <summary> /// This event will be invoked if the writer has been cancelled /// </summary> public override event WritingCancelledEventHandler WritingCancelled; #pragma warning restore 0067 private void SerializeObjectTree(object objectTree) { TextWriter writer = new StreamWriter(_stream); try { string fileContent = HtmlFromXamlConverter.ConvertXamlToHtml( XamlWriter.Save(objectTree)); writer.Write(fileContent); } finally { if (writer != null) writer.Close(); } } private Stream _stream; } }
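// --- Hedged usage sketch (not part of the sample above) ---
// Shows one way the HtmlSerializerWriter defined above could be driven: open a
// file stream, wrap it in the writer, and call Write, which serializes the
// object tree with XamlWriter.Save and converts the markup to HTML. The helper
// name and path parameter are illustrative; a WPF project reference is assumed.
using System.IO;
using System.Windows.Documents;
using DocumentSerialization;

static class HtmlExportDemo
{
    public static void ExportToHtml(FixedDocument document, string outputPath)
    {
        using (FileStream stream = File.Create(outputPath))
        {
            var writer = new HtmlSerializerWriter(stream);
            // Synchronous Write; the async overloads above throw NotSupportedException.
            writer.Write(document);
        }
    }
}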
using System; using System.IO; using NUnit.Framework; using SIL.IO; using SIL.PlatformUtilities; namespace SIL.Tests.IO { [TestFixture] class FileLocationUtilitiesTests { [Test] public void GetFileDistributedWithApplication_MultipleParts_FindsCorrectly() { var path = FileLocationUtilities.GetFileDistributedWithApplication("DirectoryForTests", "SampleFileForTests.txt"); Assert.That(File.Exists(path)); } [Test] public void GetDirectoryDistributedWithApplication_MultipleParts_FindsCorrectly() { var path = FileLocationUtilities.GetDirectoryDistributedWithApplication("DirectoryForTests"); Assert.That(Directory.Exists(path)); } [Test] public void GetDirectoryDistributedWithApplication_WhenFails_ReportsAllTried() { try { FileLocationUtilities.GetDirectoryDistributedWithApplication("LookHere", "ThisWillNotExist"); } catch (ArgumentException ex) { Assert.That(ex.Message, Is.StringContaining(Path.Combine("LookHere", "ThisWillNotExist"))); Assert.That(ex.Message, Is.StringContaining(FileLocationUtilities.DirectoryOfApplicationOrSolution)); Assert.That(ex.Message, Is.StringContaining("DistFiles")); Assert.That(ex.Message, Is.StringContaining("src")); } } [Test] public void DirectoryOfApplicationOrSolution_OnDevMachine_FindsOutputDirectory() { var path = FileLocationUtilities.DirectoryOfTheApplicationExecutable; Assert.That(Directory.Exists(path)); Assert.That(path.Contains("output")); } [Test] public void LocateInProgramFiles_SendInvalidProgramNoDeepSearch_ReturnsNull() { Assert.IsNull(FileLocationUtilities.LocateInProgramFiles("blah.exe", false)); } // 12 SEP 2013, Phil Hopper: This test not valid on Mono. [Test] [Platform(Exclude = "Unix")] [Category("SkipOnTeamCity;KnownMonoIssue")] public void LocateInProgramFiles_SendValidProgramNoDeepSearch_ReturnsNull() { Assert.IsNull(FileLocationUtilities.LocateInProgramFiles("msinfo32.exe", false)); } [Test] public void LocateInProgramFiles_SendValidProgramDeepSearch_ReturnsProgramPath() { var findFile = (Platform.IsMono ? "bash" : "msinfo32.exe"); Assert.IsNotNull(FileLocationUtilities.LocateInProgramFiles(findFile, true)); } [Test] public void LocateInProgramFiles_SendValidProgramDeepSearch_SubFolderSpecified_ReturnsProgramPath() { var findFile = (Platform.IsMono ? "bash" : "msinfo32.exe"); // this will work on Mono because it ignores the subFoldersToSearch parameter Assert.IsNotNull(FileLocationUtilities.LocateInProgramFiles(findFile, true, "Common Files")); } [Test] public void LocateInProgramFiles_SendInValidSubFolder_DoesNotThrow() { var findFile = (Platform.IsMono ? "bash" : "msinfo32.exe"); Assert.DoesNotThrow(() => FileLocationUtilities.LocateInProgramFiles(findFile, true, "!~@blah")); } [Test] [Platform(Include = "Linux")] public void LocateInProgramFiles_DeepSearch_FindsFileInSubdir() { // This simulates finding RAMP which is installed as /opt/RAMP/ramp. We can't put // anything in /opt for testing, so we add our tmp directory to the path. 
// Setup var simulatedOptDir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); var pathVariable = Environment.GetEnvironmentVariable("PATH"); try { Directory.CreateDirectory(simulatedOptDir); Directory.CreateDirectory(Path.Combine(simulatedOptDir, "RAMP")); var file = Path.Combine(simulatedOptDir, "RAMP", "ramp"); File.WriteAllText(file, "Simulated RAMP starter"); Environment.SetEnvironmentVariable("PATH", $"{simulatedOptDir}{Path.PathSeparator}{pathVariable}"); // Exercise/Verify Assert.That(FileLocationUtilities.LocateInProgramFiles("ramp", true), Is.EqualTo(file)); } finally { try { Environment.SetEnvironmentVariable("PATH", pathVariable); Directory.Delete(simulatedOptDir, true); } catch { // just ignore } } } [Test] [Platform(Include = "Linux")] public void LocateInProgramFiles_ShallowSearch_FindsNothing() { // This simulates finding RAMP which is installed as /opt/RAMP/ramp. We can't put // anything in /opt for testing, so we add our tmp directory to the path. // Setup var simulatedOptDir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); var pathVariable = Environment.GetEnvironmentVariable("PATH"); try { Directory.CreateDirectory(simulatedOptDir); Directory.CreateDirectory(Path.Combine(simulatedOptDir, "RAMP")); var file = Path.Combine(simulatedOptDir, "RAMP", "ramp"); File.WriteAllText(file, "Simulated RAMP starter"); Environment.SetEnvironmentVariable("PATH", $"{simulatedOptDir}{Path.PathSeparator}{pathVariable}"); // Exercise/Verify Assert.That(FileLocationUtilities.LocateInProgramFiles("ramp", false), Is.Null); } finally { try { Environment.SetEnvironmentVariable("PATH", pathVariable); Directory.Delete(simulatedOptDir, true); } catch { // just ignore } } } //TODO: this could use lots more tests [Test] public void LocateExecutable_DistFiles() { Assert.That(FileLocationUtilities.LocateExecutable("DirectoryForTests", "SampleExecutable.exe"), Is.StringEnding(string.Format("DistFiles{0}DirectoryForTests{0}SampleExecutable.exe", Path.DirectorySeparatorChar))); } [Test] [Platform(Exclude = "Linux")] public void LocateExecutable_PlatformSpecificInDistFiles_Windows() { Assert.That(FileLocationUtilities.LocateExecutable("DirectoryForTests", "dummy.exe"), Is.StringEnding(string.Format("DistFiles{0}Windows{0}DirectoryForTests{0}dummy.exe", Path.DirectorySeparatorChar))); } [Test] [Platform(Include = "Linux")] public void LocateExecutable_PlatformSpecificInDistFiles_LinuxWithoutExtension() { Assert.That(FileLocationUtilities.LocateExecutable("DirectoryForTests", "dummy.exe"), Is.StringEnding(string.Format("DistFiles{0}Linux{0}DirectoryForTests{0}dummy", Path.DirectorySeparatorChar))); } [Test] [Platform(Include = "Linux")] public void LocateExecutable_PlatformSpecificInDistFiles_Linux() { Assert.That(FileLocationUtilities.LocateExecutable("DirectoryForTests", "dummy2.exe"), Is.StringEnding(string.Format("DistFiles{0}Linux{0}DirectoryForTests{0}dummy2.exe", Path.DirectorySeparatorChar))); } [Test] public void LocateExecutable_NonexistingFile() { Assert.That(FileLocationUtilities.LocateExecutable(false, "dummy", "__nonexisting.exe"), Is.Null); } [Test] public void LocateExecutable_NonexistingFileThrows() { Assert.That(() => FileLocationUtilities.LocateExecutable("dummy", "__nonexisting.exe"), Throws.Exception.TypeOf<ApplicationException>()); } } }
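// --- Hedged example (not part of SIL.IO or its tests) ---
// A small, self-contained sketch of the setup/teardown pattern the two Linux
// tests above repeat: create a throwaway directory, prepend it to PATH, run the
// body, then restore PATH and delete the directory even if cleanup fails.
// The helper name RunWithTempDirOnPath is hypothetical.
using System;
using System.IO;

static class TempPathScopeDemo
{
    public static void RunWithTempDirOnPath(Action<string> body)
    {
        string tempDir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
        string originalPath = Environment.GetEnvironmentVariable("PATH");
        try
        {
            Directory.CreateDirectory(tempDir);
            Environment.SetEnvironmentVariable("PATH",
                $"{tempDir}{Path.PathSeparator}{originalPath}");
            body(tempDir);
        }
        finally
        {
            try
            {
                Environment.SetEnvironmentVariable("PATH", originalPath);
                Directory.Delete(tempDir, true);
            }
            catch
            {
                // best-effort cleanup, mirroring the tests above
            }
        }
    }
}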
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // ------------------------------------------------------------------------------ // Changes to this file must follow the http://aka.ms/api-review process. // ------------------------------------------------------------------------------ namespace System { public static partial class AdminHelpers { public static bool IsProcessElevated() { throw null; } public static int RunAsSudo(string commandLine) { throw null; } } public static partial class AssertExtensions { public static void Contains(string value, string substring) { } public static void Equal(byte[] expected, byte[] actual) { } public static void Equal<T>(System.Collections.Generic.HashSet<T> expected, System.Collections.Generic.HashSet<T> actual) { } public static void GreaterThanOrEqualTo<T>(T actual, T greaterThanOrEqualTo, string userMessage = null) where T : System.IComparable { } public static void GreaterThan<T>(T actual, T greaterThan, string userMessage = null) where T : System.IComparable { } public static void LessThanOrEqualTo<T>(T actual, T lessThanOrEqualTo, string userMessage = null) where T : System.IComparable { } public static void LessThan<T>(T actual, T lessThan, string userMessage = null) where T : System.IComparable { } public static System.Exception Throws(System.Type netCoreExceptionType, System.Type netFxExceptionType, System.Action action) { throw null; } public static void ThrowsAny(System.Type firstExceptionType, System.Type secondExceptionType, System.Action action) { } public static void ThrowsAny<TFirstExceptionType, TSecondExceptionType>(System.Action action) where TFirstExceptionType : System.Exception where TSecondExceptionType : System.Exception { } public static void ThrowsAny<TFirstExceptionType, TSecondExceptionType, TThirdExceptionType>(System.Action action) where TFirstExceptionType : System.Exception where TSecondExceptionType : System.Exception where TThirdExceptionType : System.Exception { } public static System.Threading.Tasks.Task<T> ThrowsAsync<T>(string paramName, System.Func<System.Threading.Tasks.Task> testCode) where T : System.ArgumentException { throw null; } public static void ThrowsIf<T>(bool condition, System.Action action) where T : System.Exception { } public static void Throws<T>(System.Action action, string message) where T : System.Exception { } public static T Throws<T>(string paramName, System.Action action) where T : System.ArgumentException { throw null; } public static T Throws<T>(string paramName, System.Func<object> testCode) where T : System.ArgumentException { throw null; } public static void Throws<T>(string netCoreParamName, string netFxParamName, System.Action action) where T : System.ArgumentException { } public static void Throws<T>(string netCoreParamName, string netFxParamName, System.Func<object> testCode) where T : System.ArgumentException { } public static System.Exception Throws<TNetCoreExceptionType, TNetFxExceptionType>(System.Action action) where TNetCoreExceptionType : System.Exception where TNetFxExceptionType : System.Exception { throw null; } public static void Throws<TNetCoreExceptionType, TNetFxExceptionType>(string paramName, System.Action action) where TNetCoreExceptionType : System.ArgumentException where TNetFxExceptionType : System.Exception { } public static void Throws<TNetCoreExceptionType, TNetFxExceptionType>(string netCoreParamName, 
string netFxParamName, System.Action action) where TNetCoreExceptionType : System.ArgumentException where TNetFxExceptionType : System.ArgumentException { } } public static partial class PlatformDetection { public static bool IsReflectionEmitSupported; public static bool ClientWebSocketPartialMessagesSupported { get { throw null; } } public static bool HasWindowsShell { get { throw null; } } public static System.Version ICUVersion { get { throw null; } } public static bool IsAlpine { get { throw null; } } public static bool IsArgIteratorNotSupported { get { throw null; } } public static bool IsArgIteratorSupported { get { throw null; } } public static bool IsArm64Process { get { throw null; } } public static bool IsArmOrArm64Process { get { throw null; } } public static bool IsArmProcess { get { throw null; } } public static bool IsCentos6 { get { throw null; } } public static bool IsDebian { get { throw null; } } public static bool IsDebian8 { get { throw null; } } public static bool IsDomainJoinedMachine { get { throw null; } } public static bool IsDrawingSupported { get { throw null; } } public static bool IsFedora { get { throw null; } } public static bool IsFreeBSD { get { throw null; } } public static bool IsFullFramework { get { throw null; } } public static bool IsInAppContainer { get { throw null; } } public static bool IsInContainer { get { throw null; } } public static bool IsInvokingStaticConstructorsSupported { get { throw null; } } public static bool IsMacOsHighSierraOrHigher { get { throw null; } } public static bool IsMacOsMojaveOrHigher { get { throw null; } } public static bool IsNetBSD { get { throw null; } } public static bool IsNetCore { get { throw null; } } public static bool IsNetfx462OrNewer { get { throw null; } } public static bool IsNetfx470OrNewer { get { throw null; } } public static bool IsNetfx471OrNewer { get { throw null; } } public static bool IsNetfx472OrNewer { get { throw null; } } public static bool IsNetNative { get { throw null; } } public static bool IsNonZeroLowerBoundArraySupported { get { throw null; } } public static bool IsNotArm64Process { get { throw null; } } public static bool IsNotArmNorArm64Process { get { throw null; } } public static bool IsNotArmProcess { get { throw null; } } public static bool IsNotFedoraOrRedHatFamily { get { throw null; } } public static bool IsNotInAppContainer { get { throw null; } } public static bool IsNotIntMaxValueArrayIndexSupported { get { throw null; } } public static bool IsNotMacOsHighSierraOrHigher { get { throw null; } } public static bool IsNotNetNative { get { throw null; } } public static bool IsNotNetNativeRunningAsConsoleApp { get { throw null; } } public static bool IsNotOneCoreUAP { get { throw null; } } public static bool IsNotRedHatFamily { get { throw null; } } public static bool IsNotRedHatFamily6 { get { throw null; } } public static bool IsNotWindows8x { get { throw null; } } public static bool IsNotWindowsHomeEdition { get { throw null; } } public static bool IsNotWindowsIoTCore { get { throw null; } } public static bool IsNotWindowsNanoServer { get { throw null; } } public static bool IsNotWindowsServerCore { get { throw null; } } public static bool IsNotWindowsSubsystemForLinux { get { throw null; } } public static bool IsNotWinRTSupported { get { throw null; } } public static bool IsOpenSUSE { get { throw null; } } public static bool IsOSX { get { throw null; } } public static bool IsRedHatFamily { get { throw null; } } public static bool IsRedHatFamily6 { get { throw null; } } public 
static bool IsRedHatFamily7 { get { throw null; } } public static bool IsSoundPlaySupported { get { throw null; } } public static bool IsSuperUser { get { throw null; } } public static bool IsTizen { get { throw null; } } public static bool IsUap { get { throw null; } } public static bool IsUbuntu { get { throw null; } } public static bool IsUbuntu1404 { get { throw null; } } public static bool IsUbuntu1604 { get { throw null; } } public static bool IsUbuntu1704 { get { throw null; } } public static bool IsUbuntu1710 { get { throw null; } } public static bool IsUbuntu1710OrHigher { get { throw null; } } public static bool IsUbuntu1804 { get { throw null; } } public static bool IsUbuntu1810OrHigher { get { throw null; } } public static bool IsWindows { get { throw null; } } public static bool IsWindows10Version1607OrGreater { get { throw null; } } public static bool IsWindows10Version1703OrGreater { get { throw null; } } public static bool IsWindows10Version1709OrGreater { get { throw null; } } public static bool IsWindows10Version1803OrGreater { get { throw null; } } public static bool IsWindows7 { get { throw null; } } public static bool IsWindows8x { get { throw null; } } public static bool IsWindows8xOrLater { get { throw null; } } public static bool IsWindowsAndElevated { get { throw null; } } public static bool IsWindowsHomeEdition { get { throw null; } } public static bool IsWindowsIoTCore { get { throw null; } } public static bool IsWindowsNanoServer { get { throw null; } } public static bool IsWindowsServerCore { get { throw null; } } public static bool IsWindowsSubsystemForLinux { get { throw null; } } public static bool IsWinRTSupported { get { throw null; } } public static bool IsXmlDsigXsltTransformSupported { get { throw null; } } public static string LibcRelease { get { throw null; } } public static string LibcVersion { get { throw null; } } public static System.Version OpenSslVersion { get { throw null; } } public static System.Version OSXVersion { get { throw null; } } public static bool SupportsAlpn { get { throw null; } } public static bool SupportsClientAlpn { get { throw null; } } public static bool SupportsSsl3 { get { throw null; } } public static bool TargetsNetFx452OrLower { get { throw null; } } public static int WindowsVersion { get { throw null; } } public static string GetDistroVersionString() { throw null; } } public static partial class TestEnvironment { public static bool IsStressModeEnabled { get { throw null; } } } public static partial class TheoryExtensions { [System.CLSCompliantAttribute(false)] public static Xunit.TheoryData ToTheoryData<T>(this System.Collections.Generic.IEnumerable<T> data) { throw null; } } } namespace System.Diagnostics { public abstract partial class RemoteExecutorTestBase : System.IO.FileCleanupTestBase { public const int FailWaitTimeoutMilliseconds = 60000; protected static readonly string HostRunner; protected static readonly string HostRunnerName; public const int SuccessExitCode = 42; protected static readonly string TestConsoleApp; protected RemoteExecutorTestBase() { } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Action method, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Action<string, string, string, string> method, string arg1, string arg2, string arg3, string arg4, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static 
System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Action<string, string, string> method, string arg1, string arg2, string arg3, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Action<string, string> method, string arg1, string arg2, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Action<string> method, string arg1, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<int> method, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<string, int> method, string arg, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<string, string, int> method, string arg1, string arg2, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<string, string, string, int> method, string arg1, string arg2, string arg3, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<string, string, string, string, int> method, string arg1, string arg2, string arg3, string arg4, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<string, string, string, string, string, int> method, string arg1, string arg2, string arg3, string arg4, string arg5, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<string, string, string, System.Threading.Tasks.Task<int>> method, string arg1, string arg2, string arg3, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<string, string, System.Threading.Tasks.Task<int>> method, string arg1, string arg2, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<string, System.Threading.Tasks.Task<int>> method, string arg, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvoke(System.Func<System.Threading.Tasks.Task<int>> method, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public static System.Diagnostics.RemoteExecutorTestBase.RemoteInvokeHandle RemoteInvokeRaw(System.Delegate method, string unparsedArg, System.Diagnostics.RemoteInvokeOptions options = null) { throw null; } public sealed partial class RemoteInvokeHandle : System.IDisposable { public RemoteInvokeHandle(System.Diagnostics.Process process, System.Diagnostics.RemoteInvokeOptions options, string assemblyName, string className, string methodName) { } public int ExitCode { get { throw null; } } public 
System.Diagnostics.RemoteInvokeOptions Options { get { throw null; } } public System.Diagnostics.Process Process { get { throw null; } set { } } public void Dispose() { } } } public sealed partial class RemoteInvokeOptions { public RemoteInvokeOptions() { } public bool CheckExitCode { get { throw null; } set { } } public bool EnableProfiling { get { throw null; } set { } } public string ExceptionFile { get { throw null; } } public int ExpectedExitCode { get { throw null; } set { } } public bool RunAsSudo { get { throw null; } set { } } public bool Start { get { throw null; } set { } } public System.Diagnostics.ProcessStartInfo StartInfo { get { throw null; } set { } } public int TimeOut { get { throw null; } set { } } } } namespace System.IO { public abstract partial class FileCleanupTestBase : System.IDisposable { protected FileCleanupTestBase() { } protected string TestDirectory { get { throw null; } } public void Dispose() { } protected virtual void Dispose(bool disposing) { } ~FileCleanupTestBase() { } protected string GetTestFileName(int? index = default(int?), [System.Runtime.CompilerServices.CallerMemberNameAttribute]string memberName = null, [System.Runtime.CompilerServices.CallerLineNumberAttribute]int lineNumber = 0) { throw null; } protected string GetTestFilePath(int? index = default(int?), [System.Runtime.CompilerServices.CallerMemberNameAttribute]string memberName = null, [System.Runtime.CompilerServices.CallerLineNumberAttribute]int lineNumber = 0) { throw null; } } }
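// --- Hedged usage sketch (based only on the surface listed above) ---
// A test class deriving from RemoteExecutorTestBase typically calls RemoteInvoke
// with a delegate that runs in a separate process and returns SuccessExitCode;
// disposing the returned RemoteInvokeHandle waits for the child process and
// validates its exit code. The test name and environment variable are illustrative.
using System.Diagnostics;
using Xunit;

public class EnvironmentIsolationTests : RemoteExecutorTestBase
{
    [Fact]
    public void ChildProcess_EnvironmentChange_DoesNotLeakToParent()
    {
        using (RemoteInvoke(() =>
        {
            System.Environment.SetEnvironmentVariable("DEMO_VAR", "child-only");
            Assert.Equal("child-only", System.Environment.GetEnvironmentVariable("DEMO_VAR"));
            return SuccessExitCode;
        }))
        {
            // The parent process never sees the child's environment change.
            Assert.Null(System.Environment.GetEnvironmentVariable("DEMO_VAR"));
        }
    }
}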
using System; using UIKit; using Foundation; using CoreGraphics; using QuartzSample; public class LineDrawingView : QuartzView { public override void DrawInContext (CGContext context) { // Draw lines with a white stroke color context.SetStrokeColor (1f, 1f, 1f, 1f); // Draw them with a 2.0 stroke width so they are more visible context.SetLineWidth (2); context.MoveTo (10, 30); context.AddLineToPoint (310, 30); context.StrokePath (); // Draw connected sequence of lines var points = new CGPoint [] { new CGPoint (10, 90), new CGPoint (70, 60), new CGPoint (130, 90), new CGPoint (190, 60), new CGPoint (250, 90), new CGPoint (310, 60) }; context.AddLines (points); context.StrokePath (); var segments = new CGPoint [] { new CGPoint (10, 150), new CGPoint (70, 120), new CGPoint (130, 150), new CGPoint (190, 120), new CGPoint (250, 150), new CGPoint (310, 120), }; // Bulk call to stroke a sequence of line segments context.StrokeLineSegments (segments); } } public class LineWidthDrawingView : QuartzView { public override void DrawInContext (CGContext context) { context.SetStrokeColor (1, 1, 1, 1f); // Draw lines with a stroke width from 1-10 for (int i = 1; i <= 10; ++i) { context.SetLineWidth (i); context.MoveTo (10, (float)i * 20.5f); context.AddLineToPoint (310, (float)i * 20.5f); context.StrokePath (); } // Demonstration that stroke is even on both sides of the line context.SetLineWidth (15); context.MoveTo (10, 245.5f); context.AddLineToPoint (310, 245.5f); context.StrokePath (); context.SetStrokeColor (1, 0, 0, 1); context.SetLineWidth (3); context.MoveTo (10, 245.5f); context.AddLineToPoint (310, 245.5f); context.StrokePath (); } } public class LineCapJoinDrawingView : QuartzView { public override void DrawInContext (CGContext context) { // Drawing lines with a white stroke color context.SetStrokeColor (1, 1, 1, 1); // Preserve the current drawing state context.SaveState (); // Set the line width so that the cap is visible context.SetLineWidth (20); // Line caps demonstration // Line cap butt, default. context.SetLineCap (CGLineCap.Butt); context.MoveTo (40, 30); context.AddLineToPoint (280, 30); context.StrokePath (); // Line cap round context.SetLineCap (CGLineCap.Round); context.MoveTo (40, 65); context.AddLineToPoint (280, 65); context.StrokePath (); // Line cap square context.SetLineCap (CGLineCap.Square); context.MoveTo (40, 100); context.AddLineToPoint (280, 100); context.StrokePath (); // Restore the previous drawing state, and save it again. context.RestoreState (); context.SaveState (); // Set the line width so that the join is visible context.SetLineWidth (20); // Line join miter, default context.SetLineJoin (CGLineJoin.Miter); context.MoveTo (40, 260); context.AddLineToPoint (160, 140); context.AddLineToPoint (280, 260); context.StrokePath (); // Line join round context.SetLineJoin (CGLineJoin.Round); context.MoveTo (40, 320); context.AddLineToPoint (160, 200); context.AddLineToPoint (280, 320); context.StrokePath (); // Line join bevel context.SetLineJoin (CGLineJoin.Bevel); context.MoveTo (40, 380); context.AddLineToPoint (160, 260); context.AddLineToPoint (280, 380); context.StrokePath (); // Restore the previous drawing state. 
context.RestoreState (); // Demonstrate where the path that generated each line is context.SetStrokeColor (1, 0, 0, 1); context.SetLineWidth (3); context.MoveTo (40, 30); context.AddLineToPoint (280, 30); context.MoveTo (40, 65); context.AddLineToPoint (280, 65); context.MoveTo (40, 100); context.AddLineToPoint (280, 100); context.MoveTo (40, 260); context.AddLineToPoint (160, 140); context.AddLineToPoint (280, 260); context.MoveTo (40, 320); context.AddLineToPoint (160, 200); context.AddLineToPoint (280, 320); context.MoveTo (40, 380); context.AddLineToPoint (160, 260); context.AddLineToPoint (280, 380); context.StrokePath (); } } public class LineDashDrawingView : QuartzView { public override void DrawInContext (CGContext context) { // Drawing lines with a white stroke color context.SetStrokeColor (1, 1, 1, 1); // Draw them with a 2 stroke width so they are a bit more visible. context.SetLineWidth (2); // Each dash entry is a run-length in the current coordinate system. // For dash1 we demonstrate the effect of the number of entries in the dash array // when count==2, we get length 10 drawn, length 10 skipped, etc // when count==3, we get 10 drawn, 10 skipped, 20 draw, 10 skipped, 10 drawn, 20 skipped, etc // and so on var dash1 = new nfloat [] { 10, 10, 20, 30, 50 }; // Different dash lengths for (int i = 2; i <= 5; ++i) { context.SetLineDash (0, dash1, i); context.MoveTo (10, (i - 1) * 20); context.AddLineToPoint (310, (i - 1) * 20); context.StrokePath (); } // For dash2 we always use count 4, but use it to demonstrate the phase // phase=0 starts us 0 points into the dash, so we draw 10, skip 10, draw 20, skip 20, etc. // phase=6 starts 6 points in, so we draw 4, skip 10, draw 20, skip 20, draw 10, skip 10, etc. // phase=12 stats us 12 points in, so we skip 8, draw 20, skip 20, draw 10, skip 10, etc. // and so on. nfloat[] dash2 = { 10, 10, 20, 20 }; // Different dash phases for (int i = 0; i < 10; ++i) { context.SetLineDash (i * 6, dash2, 4); context.MoveTo (10, (float)(i + 6) * 20); context.AddLineToPoint (310, (float)(i + 6) * 20); context.StrokePath (); } } }
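// --- Hedged example (same style as the Quartz sample views above) ---
// QuartzView and DrawInContext come from the QuartzSample project already
// referenced in this file; everything else is standard CoreGraphics. This view
// combines the save/restore, dash, and stroke APIs shown above to outline a
// dashed placeholder rectangle; the view name and geometry are illustrative.
using System;
using CoreGraphics;
using QuartzSample;

public class DashedFrameDrawingView : QuartzView
{
    public override void DrawInContext (CGContext context)
    {
        context.SaveState ();

        // White 2pt stroke, dashed 6 points on / 4 points off.
        context.SetStrokeColor (1, 1, 1, 1);
        context.SetLineWidth (2);
        context.SetLineDash (0, new nfloat [] { 6, 4 }, 2);

        // Outline a rectangle inset from the 320-point-wide sample canvas.
        context.StrokeRect (new CGRect (10, 10, 300, 180));

        context.RestoreState ();
    }
}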
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections; using System.Collections.Generic; using System.Threading; namespace System.Diagnostics { public class TraceSource { private static List<WeakReference> s_tracesources = new List<WeakReference>(); private static int s_LastCollectionCount; private volatile SourceSwitch _internalSwitch; private volatile TraceListenerCollection _listeners; private SourceLevels _switchLevel; private volatile string _sourceName; internal volatile bool _initCalled = false; // Whether we've called Initialize already. public TraceSource(string name) : this(name, SourceLevels.Off) { } public TraceSource(string name, SourceLevels defaultLevel) { if (name == null) throw new ArgumentNullException(nameof(name)); if (name.Length == 0) throw new ArgumentException(SR.Format(SR.InvalidNullEmptyArgument, nameof(name)), nameof(name)); _sourceName = name; _switchLevel = defaultLevel; // Add a weakreference to this source and cleanup invalid references lock (s_tracesources) { _pruneCachedTraceSources(); s_tracesources.Add(new WeakReference(this)); } } private static void _pruneCachedTraceSources() { lock (s_tracesources) { if (s_LastCollectionCount != GC.CollectionCount(2)) { List<WeakReference> buffer = new List<WeakReference>(s_tracesources.Count); for (int i = 0; i < s_tracesources.Count; i++) { TraceSource tracesource = ((TraceSource)s_tracesources[i].Target); if (tracesource != null) { buffer.Add(s_tracesources[i]); } } if (buffer.Count < s_tracesources.Count) { s_tracesources.Clear(); s_tracesources.AddRange(buffer); s_tracesources.TrimExcess(); } s_LastCollectionCount = GC.CollectionCount(2); } } } private void Initialize() { if (!_initCalled) { lock (this) { if (_initCalled) return; NoConfigInit(); _initCalled = true; } } } private void NoConfigInit() { _internalSwitch = new SourceSwitch(_sourceName, _switchLevel.ToString()); _listeners = new TraceListenerCollection(); _listeners.Add(new DefaultTraceListener()); } public void Close() { // No need to call Initialize() if (_listeners != null) { // Use global lock lock (TraceInternal.critSec) { foreach (TraceListener listener in _listeners) { listener.Dispose(); } } } } public void Flush() { // No need to call Initialize() if (_listeners != null) { if (TraceInternal.UseGlobalLock) { lock (TraceInternal.critSec) { foreach (TraceListener listener in _listeners) { listener.Flush(); } } } else { foreach (TraceListener listener in _listeners) { if (!listener.IsThreadSafe) { lock (listener) { listener.Flush(); } } else { listener.Flush(); } } } } } internal static void RefreshAll() { lock (s_tracesources) { _pruneCachedTraceSources(); for (int i = 0; i < s_tracesources.Count; i++) { TraceSource tracesource = ((TraceSource)s_tracesources[i].Target); if (tracesource != null) { tracesource.Refresh(); } } } } internal void Refresh() { if (!_initCalled) { Initialize(); return; } } [Conditional("TRACE")] public void TraceEvent(TraceEventType eventType, int id) { Initialize(); if (_internalSwitch.ShouldTrace(eventType) && _listeners != null) { TraceEventCache manager = new TraceEventCache(); if (TraceInternal.UseGlobalLock) { // we lock on the same object that Trace does because we're writing to the same Listeners. 
lock (TraceInternal.critSec) { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; listener.TraceEvent(manager, Name, eventType, id); if (Trace.AutoFlush) listener.Flush(); } } } else { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; if (!listener.IsThreadSafe) { lock (listener) { listener.TraceEvent(manager, Name, eventType, id); if (Trace.AutoFlush) listener.Flush(); } } else { listener.TraceEvent(manager, Name, eventType, id); if (Trace.AutoFlush) listener.Flush(); } } } } } [Conditional("TRACE")] public void TraceEvent(TraceEventType eventType, int id, string message) { Initialize(); if (_internalSwitch.ShouldTrace(eventType) && _listeners != null) { TraceEventCache manager = new TraceEventCache(); if (TraceInternal.UseGlobalLock) { // we lock on the same object that Trace does because we're writing to the same Listeners. lock (TraceInternal.critSec) { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; listener.TraceEvent(manager, Name, eventType, id, message); if (Trace.AutoFlush) listener.Flush(); } } } else { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; if (!listener.IsThreadSafe) { lock (listener) { listener.TraceEvent(manager, Name, eventType, id, message); if (Trace.AutoFlush) listener.Flush(); } } else { listener.TraceEvent(manager, Name, eventType, id, message); if (Trace.AutoFlush) listener.Flush(); } } } } } [Conditional("TRACE")] public void TraceEvent(TraceEventType eventType, int id, string format, params object[] args) { Initialize(); if (_internalSwitch.ShouldTrace(eventType) && _listeners != null) { TraceEventCache manager = new TraceEventCache(); if (TraceInternal.UseGlobalLock) { // we lock on the same object that Trace does because we're writing to the same Listeners. lock (TraceInternal.critSec) { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; listener.TraceEvent(manager, Name, eventType, id, format, args); if (Trace.AutoFlush) listener.Flush(); } } } else { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; if (!listener.IsThreadSafe) { lock (listener) { listener.TraceEvent(manager, Name, eventType, id, format, args); if (Trace.AutoFlush) listener.Flush(); } } else { listener.TraceEvent(manager, Name, eventType, id, format, args); if (Trace.AutoFlush) listener.Flush(); } } } } } [Conditional("TRACE")] public void TraceData(TraceEventType eventType, int id, object data) { Initialize(); if (_internalSwitch.ShouldTrace(eventType) && _listeners != null) { TraceEventCache manager = new TraceEventCache(); if (TraceInternal.UseGlobalLock) { // we lock on the same object that Trace does because we're writing to the same Listeners. 
lock (TraceInternal.critSec) { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; listener.TraceData(manager, Name, eventType, id, data); if (Trace.AutoFlush) listener.Flush(); } } } else { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; if (!listener.IsThreadSafe) { lock (listener) { listener.TraceData(manager, Name, eventType, id, data); if (Trace.AutoFlush) listener.Flush(); } } else { listener.TraceData(manager, Name, eventType, id, data); if (Trace.AutoFlush) listener.Flush(); } } } } } [Conditional("TRACE")] public void TraceData(TraceEventType eventType, int id, params object[] data) { Initialize(); if (_internalSwitch.ShouldTrace(eventType) && _listeners != null) { TraceEventCache manager = new TraceEventCache(); if (TraceInternal.UseGlobalLock) { // we lock on the same object that Trace does because we're writing to the same Listeners. lock (TraceInternal.critSec) { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; listener.TraceData(manager, Name, eventType, id, data); if (Trace.AutoFlush) listener.Flush(); } } } else { for (int i = 0; i < _listeners.Count; i++) { TraceListener listener = _listeners[i]; if (!listener.IsThreadSafe) { lock (listener) { listener.TraceData(manager, Name, eventType, id, data); if (Trace.AutoFlush) listener.Flush(); } } else { listener.TraceData(manager, Name, eventType, id, data); if (Trace.AutoFlush) listener.Flush(); } } } } } [Conditional("TRACE")] public void TraceInformation(string message) { // eventType= TraceEventType.Info, id=0 // No need to call Initialize() TraceEvent(TraceEventType.Information, 0, message, null); } [Conditional("TRACE")] public void TraceInformation(string format, params object[] args) { // No need to call Initialize() TraceEvent(TraceEventType.Information, 0, format, args); } public string Name { get { return _sourceName; } } public TraceListenerCollection Listeners { get { Initialize(); return _listeners; } } public SourceSwitch Switch { // No need for security demand here. SourceSwitch.set_Level is protected already. get { Initialize(); return _internalSwitch; } set { if (value == null) throw new ArgumentNullException(nameof(Switch)); Initialize(); _internalSwitch = value; } } } }
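// --- Hedged usage sketch (not part of the file above) ---
// Minimal consumption of the TraceSource type defined above: create a source
// with an Information-level switch, attach a TextWriterTraceListener, and emit
// an informational message, a formatted event, and a data payload. The source
// name and event ids are illustrative.
using System;
using System.Diagnostics;

static class TraceSourceDemo
{
    static void Main()
    {
        var source = new TraceSource("DemoSource", SourceLevels.Information);
        source.Listeners.Add(new TextWriterTraceListener(Console.Out));

        source.TraceInformation("starting up");
        source.TraceEvent(TraceEventType.Warning, 1001, "cache miss for {0}", "user:42");
        source.TraceData(TraceEventType.Error, 2001, new { Elapsed = 17 });

        source.Flush();
        source.Close();
    }
}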
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Xml; using System.Collections; using System.Globalization; using System.Diagnostics; using System.Data.Common; namespace System.Data { internal sealed class XDRSchema : XMLSchema { internal string _schemaName; internal string _schemaUri; internal XmlElement _schemaRoot; internal DataSet _ds; private static readonly char[] s_colonArray = new char[] { ':' }; internal XDRSchema(DataSet ds, bool fInline) { _schemaUri = string.Empty; _schemaName = string.Empty; _schemaRoot = null; _ds = ds; } internal void LoadSchema(XmlElement schemaRoot, DataSet ds) { if (schemaRoot == null) return; _schemaRoot = schemaRoot; _ds = ds; _schemaName = schemaRoot.GetAttribute(Keywords.NAME); _schemaUri = string.Empty; Debug.Assert(FEqualIdentity(schemaRoot, Keywords.XDR_SCHEMA, Keywords.XDRNS), "Illegal node"); // Get Locale and CaseSensitive properties if (_schemaName == null || _schemaName.Length == 0) _schemaName = "NewDataSet"; ds.Namespace = _schemaUri; // Walk all the top level Element tags. for (XmlNode n = schemaRoot.FirstChild; n != null; n = n.NextSibling) { if (!(n is XmlElement)) continue; XmlElement child = (XmlElement)n; if (FEqualIdentity(child, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS)) { HandleTable(child); } } _schemaName = XmlConvert.DecodeName(_schemaName); if (ds.Tables[_schemaName] == null) ds.DataSetName = _schemaName; } internal XmlElement FindTypeNode(XmlElement node) { string strType; XmlNode vn; XmlNode vnRoof; Debug.Assert(FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS) || FEqualIdentity(node, Keywords.XDR_SCHEMA, Keywords.XDRNS) || FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS) || FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS), "Invalid node type " + node.LocalName); if (FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS)) return node; strType = node.GetAttribute(Keywords.TYPE); if (FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS) || FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS)) { if (strType == null || strType.Length == 0) return null; // Find an ELEMENTTYPE or ATTRIBUTETYPE with name=strType vn = node.OwnerDocument.FirstChild; vnRoof = node.OwnerDocument; while (vn != vnRoof) { if ((FEqualIdentity(vn, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS) && FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS)) || (FEqualIdentity(vn, Keywords.XDR_ATTRIBUTETYPE, Keywords.XDRNS) && FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS))) { if (vn is XmlElement && ((XmlElement)vn).GetAttribute(Keywords.NAME) == strType) return (XmlElement)vn; } // Move vn node if (vn.FirstChild != null) vn = vn.FirstChild; else if (vn.NextSibling != null) vn = vn.NextSibling; else { while (vn != vnRoof) { vn = vn.ParentNode; if (vn.NextSibling != null) { vn = vn.NextSibling; break; } } } } return null; } return null; } internal bool IsTextOnlyContent(XmlElement node) { Debug.Assert(FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS), "Invalid node type " + node.LocalName); string value = node.GetAttribute(Keywords.CONTENT); if (value == null || value.Length == 0) { string type = node.GetAttribute(Keywords.DT_TYPE, Keywords.DTNS); return !string.IsNullOrEmpty(type); } if (value == Keywords.EMPTY || value == Keywords.ELTONLY || value == Keywords.ELEMENTONLY || value == Keywords.MIXED) { return false; } if 
(value == Keywords.TEXTONLY) { return true; } throw ExceptionBuilder.InvalidAttributeValue("content", value); } internal bool IsXDRField(XmlElement node, XmlElement typeNode) { int min = 1; int max = 1; if (!IsTextOnlyContent(typeNode)) return false; for (XmlNode n = typeNode.FirstChild; n != null; n = n.NextSibling) { if (FEqualIdentity(n, Keywords.XDR_ELEMENT, Keywords.XDRNS) || FEqualIdentity(n, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS)) return false; } if (FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS)) { GetMinMax(node, ref min, ref max); if (max == -1 || max > 1) return false; } return true; } internal DataTable HandleTable(XmlElement node) { XmlElement typeNode; Debug.Assert(FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS) || FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS), "Invalid node type"); // Figure out if this really is a table. If not, bail out. typeNode = FindTypeNode(node); string occurs = node.GetAttribute(Keywords.MINOCCURS); if (occurs != null && occurs.Length > 0) if ((Convert.ToInt32(occurs, CultureInfo.InvariantCulture) > 1) && (typeNode == null)) { return InstantiateSimpleTable(_ds, node); } occurs = node.GetAttribute(Keywords.MAXOCCURS); if (occurs != null && occurs.Length > 0) if (!string.Equals(occurs, "1", StringComparison.Ordinal) && (typeNode == null)) { return InstantiateSimpleTable(_ds, node); } if (typeNode == null) return null; if (IsXDRField(node, typeNode)) return null; return InstantiateTable(_ds, node, typeNode); } private sealed class NameType : IComparable { public string name; public Type type; public NameType(string n, Type t) { name = n; type = t; } public int CompareTo(object obj) { return string.Compare(name, (string)obj, StringComparison.Ordinal); } }; // XDR spec: http://www.ltg.ed.ac.uk/~ht/XMLData-Reduced.htm private static readonly NameType[] s_mapNameTypeXdr = { new NameType("bin.base64", typeof(byte[]) ), new NameType("bin.hex", typeof(byte[]) ), new NameType("boolean", typeof(bool) ), new NameType("byte", typeof(sbyte) ), new NameType("char", typeof(char) ), new NameType("date", typeof(DateTime)), new NameType("dateTime", typeof(DateTime)), new NameType("dateTime.tz", typeof(DateTime)), new NameType("entities", typeof(string) ), new NameType("entity", typeof(string) ), new NameType("enumeration", typeof(string) ), new NameType("fixed.14.4", typeof(decimal) ), new NameType("float", typeof(double) ), new NameType("i1", typeof(sbyte) ), new NameType("i2", typeof(short) ), new NameType("i4", typeof(int) ), new NameType("i8", typeof(long) ), new NameType("id", typeof(string) ), new NameType("idref", typeof(string) ), new NameType("idrefs", typeof(string) ), new NameType("int", typeof(int) ), new NameType("nmtoken", typeof(string) ), new NameType("nmtokens", typeof(string) ), new NameType("notation", typeof(string) ), new NameType("number", typeof(decimal) ), new NameType("r4", typeof(float) ), new NameType("r8", typeof(double) ), new NameType("string", typeof(string) ), new NameType("time", typeof(DateTime)), new NameType("time.tz", typeof(DateTime)), new NameType("ui1", typeof(byte) ), new NameType("ui2", typeof(ushort) ), new NameType("ui4", typeof(uint) ), new NameType("ui8", typeof(ulong) ), new NameType("uri", typeof(string) ), new NameType("uuid", typeof(Guid) ), }; private static NameType FindNameType(string name) { #if DEBUG for (int i = 1; i < s_mapNameTypeXdr.Length; ++i) { Debug.Assert((s_mapNameTypeXdr[i - 1].CompareTo(s_mapNameTypeXdr[i].name)) < 0, "incorrect sorting"); } #endif int index = 
Array.BinarySearch(s_mapNameTypeXdr, name); if (index < 0) { #if DEBUG // Let's check that we realy don't have this name: foreach (NameType nt in s_mapNameTypeXdr) { Debug.Assert(nt.name != name, "FindNameType('" + name + "') -- failed. Existed name not found"); } #endif throw ExceptionBuilder.UndefinedDatatype(name); } Debug.Assert(s_mapNameTypeXdr[index].name == name, "FindNameType('" + name + "') -- failed. Wrong name found"); return s_mapNameTypeXdr[index]; } private static readonly NameType s_enumerationNameType = FindNameType("enumeration"); private Type ParseDataType(string dt, string dtValues) { string strType = dt; string[] parts = dt.Split(s_colonArray); // ":" if (parts.Length > 2) { throw ExceptionBuilder.InvalidAttributeValue("type", dt); } else if (parts.Length == 2) { // CONSIDER: check that we have valid prefix strType = parts[1]; } NameType nt = FindNameType(strType); if (nt == s_enumerationNameType && (dtValues == null || dtValues.Length == 0)) throw ExceptionBuilder.MissingAttribute("type", Keywords.DT_VALUES); return nt.type; } internal string GetInstanceName(XmlElement node) { string instanceName; if (FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS) || FEqualIdentity(node, Keywords.XDR_ATTRIBUTETYPE, Keywords.XDRNS)) { instanceName = node.GetAttribute(Keywords.NAME); if (instanceName == null || instanceName.Length == 0) { throw ExceptionBuilder.MissingAttribute("Element", Keywords.NAME); } } else { instanceName = node.GetAttribute(Keywords.TYPE); if (instanceName == null || instanceName.Length == 0) throw ExceptionBuilder.MissingAttribute("Element", Keywords.TYPE); } return instanceName; } internal void HandleColumn(XmlElement node, DataTable table) { Debug.Assert(FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS) || FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS), "Illegal node type"); string instanceName; string strName; Type type; string strType; string strValues; int minOccurs = 0; int maxOccurs = 1; string strDefault; DataColumn column; // Get the name if (node.Attributes.Count > 0) { string strRef = node.GetAttribute(Keywords.REF); if (strRef != null && strRef.Length > 0) return; //skip ref nodes. 
B2 item strName = instanceName = GetInstanceName(node); column = table.Columns[instanceName, _schemaUri]; if (column != null) { if (column.ColumnMapping == MappingType.Attribute) { if (FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS)) throw ExceptionBuilder.DuplicateDeclaration(strName); } else { if (FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS)) { throw ExceptionBuilder.DuplicateDeclaration(strName); } } instanceName = GenUniqueColumnName(strName, table); } } else { strName = instanceName = string.Empty; } // Now get the type XmlElement typeNode = FindTypeNode(node); SimpleType xsdType = null; if (typeNode == null) { strType = node.GetAttribute(Keywords.TYPE); throw ExceptionBuilder.UndefinedDatatype(strType); } strType = typeNode.GetAttribute(Keywords.DT_TYPE, Keywords.DTNS); strValues = typeNode.GetAttribute(Keywords.DT_VALUES, Keywords.DTNS); if (strType == null || strType.Length == 0) { strType = string.Empty; type = typeof(string); } else { type = ParseDataType(strType, strValues); // HACK: temp work around special types if (strType == "float") { strType = string.Empty; } if (strType == "char") { strType = string.Empty; xsdType = SimpleType.CreateSimpleType(StorageType.Char, type); } if (strType == "enumeration") { strType = string.Empty; xsdType = SimpleType.CreateEnumeratedType(strValues); } if (strType == "bin.base64") { strType = string.Empty; xsdType = SimpleType.CreateByteArrayType("base64"); } if (strType == "bin.hex") { strType = string.Empty; xsdType = SimpleType.CreateByteArrayType("hex"); } } bool isAttribute = FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS); GetMinMax(node, isAttribute, ref minOccurs, ref maxOccurs); strDefault = null; // Does XDR has default? strDefault = node.GetAttribute(Keywords.DEFAULT); bool bNullable = false; column = new DataColumn(XmlConvert.DecodeName(instanceName), type, null, isAttribute ? MappingType.Attribute : MappingType.Element); SetProperties(column, node.Attributes); // xmlschema.SetProperties will skipp setting expressions column.XmlDataType = strType; column.SimpleType = xsdType; column.AllowDBNull = (minOccurs == 0) || bNullable; column.Namespace = (isAttribute) ? 
string.Empty : _schemaUri; // We will skip handling expression columns in SetProperties, so we need set the expressions here if (node.Attributes != null) { for (int i = 0; i < node.Attributes.Count; i++) { if (node.Attributes[i].NamespaceURI == Keywords.MSDNS) { if (node.Attributes[i].LocalName == "Expression") { column.Expression = node.Attributes[i].Value; break; } } } } string targetNamespace = node.GetAttribute(Keywords.TARGETNAMESPACE); if (targetNamespace != null && targetNamespace.Length > 0) column.Namespace = targetNamespace; table.Columns.Add(column); if (strDefault != null && strDefault.Length != 0) try { column.DefaultValue = SqlConvert.ChangeTypeForXML(strDefault, type); } catch (System.FormatException) { throw ExceptionBuilder.CannotConvert(strDefault, type.FullName); } } internal void GetMinMax(XmlElement elNode, ref int minOccurs, ref int maxOccurs) { GetMinMax(elNode, false, ref minOccurs, ref maxOccurs); } internal void GetMinMax(XmlElement elNode, bool isAttribute, ref int minOccurs, ref int maxOccurs) { string occurs = elNode.GetAttribute(Keywords.MINOCCURS); if (occurs != null && occurs.Length > 0) { try { minOccurs = int.Parse(occurs, CultureInfo.InvariantCulture); } catch (Exception e) when (ADP.IsCatchableExceptionType(e)) { throw ExceptionBuilder.AttributeValues(nameof(minOccurs), "0", "1"); } } occurs = elNode.GetAttribute(Keywords.MAXOCCURS); if (occurs != null && occurs.Length > 0) { int bZeroOrMore = string.Compare(occurs, Keywords.STAR, StringComparison.Ordinal); if (bZeroOrMore == 0) { maxOccurs = -1; } else { try { maxOccurs = int.Parse(occurs, CultureInfo.InvariantCulture); } catch (Exception e) when (ADP.IsCatchableExceptionType(e)) { throw ExceptionBuilder.AttributeValues(nameof(maxOccurs), "1", Keywords.STAR); } if (maxOccurs != 1) { throw ExceptionBuilder.AttributeValues(nameof(maxOccurs), "1", Keywords.STAR); } } } } internal void HandleTypeNode(XmlElement typeNode, DataTable table, ArrayList tableChildren) { DataTable tableChild; for (XmlNode n = typeNode.FirstChild; n != null; n = n.NextSibling) { if (!(n is XmlElement)) continue; if (FEqualIdentity(n, Keywords.XDR_ELEMENT, Keywords.XDRNS)) { tableChild = HandleTable((XmlElement)n); if (tableChild != null) { tableChildren.Add(tableChild); continue; } } if (FEqualIdentity(n, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS) || FEqualIdentity(n, Keywords.XDR_ELEMENT, Keywords.XDRNS)) { HandleColumn((XmlElement)n, table); continue; } } } internal DataTable InstantiateTable(DataSet dataSet, XmlElement node, XmlElement typeNode) { string typeName = string.Empty; XmlAttributeCollection attrs = node.Attributes; DataTable table; int minOccurs = 1; int maxOccurs = 1; string keys = null; ArrayList tableChildren = new ArrayList(); if (attrs.Count > 0) { typeName = GetInstanceName(node); table = dataSet.Tables.GetTable(typeName, _schemaUri); if (table != null) { return table; } } table = new DataTable(XmlConvert.DecodeName(typeName)); // fxcop: new DataTable should inherit the CaseSensitive, Locale from DataSet and possibly updating during SetProperties table.Namespace = _schemaUri; GetMinMax(node, ref minOccurs, ref maxOccurs); table.MinOccurs = minOccurs; table.MaxOccurs = maxOccurs; _ds.Tables.Add(table); HandleTypeNode(typeNode, table, tableChildren); SetProperties(table, attrs); // check to see if we fave unique constraint if (keys != null) { string[] list = keys.TrimEnd(null).Split(null); int keyLength = list.Length; var cols = new DataColumn[keyLength]; for (int i = 0; i < keyLength; i++) { DataColumn col = 
table.Columns[list[i], _schemaUri]; if (col == null) throw ExceptionBuilder.ElementTypeNotFound(list[i]); cols[i] = col; } table.PrimaryKey = cols; } foreach (DataTable _tableChild in tableChildren) { DataRelation relation = null; DataRelationCollection childRelations = table.ChildRelations; for (int j = 0; j < childRelations.Count; j++) { if (!childRelations[j].Nested) continue; if (_tableChild == childRelations[j].ChildTable) relation = childRelations[j]; } if (relation != null) continue; DataColumn parentKey = table.AddUniqueKey(); // foreign key in the child table DataColumn childKey = _tableChild.AddForeignKey(parentKey); // create relationship // setup relationship between parent and this table relation = new DataRelation(table.TableName + "_" + _tableChild.TableName, parentKey, childKey, true); relation.CheckMultipleNested = false; // disable the check for multiple nested parent relation.Nested = true; _tableChild.DataSet.Relations.Add(relation); relation.CheckMultipleNested = true; // enable the check for multiple nested parent } return table; } internal DataTable InstantiateSimpleTable(DataSet dataSet, XmlElement node) { string typeName; XmlAttributeCollection attrs = node.Attributes; DataTable table; int minOccurs = 1; int maxOccurs = 1; typeName = GetInstanceName(node); table = dataSet.Tables.GetTable(typeName, _schemaUri); if (table != null) { throw ExceptionBuilder.DuplicateDeclaration(typeName); } string tbName = XmlConvert.DecodeName(typeName); table = new DataTable(tbName); // fxcop: new DataTable will either inherit the CaseSensitive, Locale from DataSet or be set during SetProperties table.Namespace = _schemaUri; GetMinMax(node, ref minOccurs, ref maxOccurs); table.MinOccurs = minOccurs; table.MaxOccurs = maxOccurs; SetProperties(table, attrs); table._repeatableElement = true; HandleColumn(node, table); table.Columns[0].ColumnName = tbName + "_Column"; _ds.Tables.Add(table); return table; } } }
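// ---------------------------------------------------------------------------
// Usage sketch (added for illustration; not part of the source file above).
// The XDR handling above is internal DataSet plumbing; it is reached indirectly
// when an XDR schema is loaded through the public DataSet.ReadXmlSchema API.
// The schema string below is a minimal, hypothetical XDR document used only to
// show that path; the exact shape of the inferred tables is not guaranteed here.
using System;
using System.Data;
using System.IO;

static class XdrSchemaUsageSketch
{
    static void Demo()
    {
        // Minimal XDR schema: a text-only "Name" element nested in a "Customer" element type.
        const string xdrSchema =
            @"<Schema name='CustomerSchema'
                      xmlns='urn:schemas-microsoft-com:xml-data'
                      xmlns:dt='urn:schemas-microsoft-com:datatypes'>
                <ElementType name='Name' content='textOnly' dt:type='string'/>
                <ElementType name='Customer' content='eltOnly'>
                  <element type='Name' minOccurs='0' maxOccurs='1'/>
                </ElementType>
              </Schema>";

        var dataSet = new DataSet();
        dataSet.ReadXmlSchema(new StringReader(xdrSchema)); // exercises the internal XDR schema reader
        foreach (DataTable table in dataSet.Tables)
            Console.WriteLine($"{table.TableName}: {table.Columns.Count} column(s)");
    }
}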
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using CommandLine; using Google.Ads.GoogleAds.Lib; using Google.Ads.GoogleAds.V10.Errors; using Google.Ads.GoogleAds.V10.Resources; using Google.Ads.GoogleAds.V10.Services; using Google.Api.Gax; using System; using System.Collections.Generic; using System.Linq; using static Google.Ads.GoogleAds.V10.Enums.TimeTypeEnum.Types; namespace Google.Ads.GoogleAds.Examples.V10 { /// <summary> /// This example creates a billing setup for a customer. A billing setup is a link between a /// payments account and a customer. The new billing setup can either reuse an existing payments /// account, or create a new payments account with a given payments profile. Billing setups are /// applicable for clients on monthly invoicing only. See here for details about applying for /// monthly invoicing: https://support.google.com/google-ads/answer/2375377 In the case of /// consolidated billing, a payments account is linked to the manager account and is linked to a /// customer account via a billing setup. /// </summary> public class AddBillingSetup : ExampleBase { /// <summary> /// Command line options for running the <see cref="AddBillingSetup"/> example. /// </summary> public class Options : OptionsBase { /// <summary> /// The Google Ads customer ID for which the call is made. /// </summary> [Option("customerId", Required = true, HelpText = "The Google Ads customer ID for which the call is made.")] public long CustomerId { get; set; } /// <summary> /// Optional payments account ID to attach to the new billing setup. Must be formatted /// as "1234-5678-9012-3456". /// </summary> [Option("paymentsAccountId", Required = false, HelpText = "Optional payments account ID to attach to the new billing setup. Must be " + "formatted as '1234-5678-9012-3456'.")] public string PaymentsAccountId { get; set; } /// <summary> /// Optional payments profile ID to attach to a new payments account and to the new /// billing setup. Must be formatted as "1234-5678-9012". /// </summary> [Option("paymentsProfileId", Required = false, HelpText = "Optional payments profile ID to attach to a new payments account and to the new" + " billing setup. Must be formatted as '1234-5678-9012'.")] public string PaymentsProfileId { get; set; } } /// <summary> /// Main method, to run this code example as a standalone application. /// </summary> /// <param name="args">The command line arguments.</param> public static void Main(string[] args) { Options options = new Options(); CommandLine.Parser.Default.ParseArguments<Options>(args).MapResult( delegate (Options o) { options = o; return 0; }, delegate (IEnumerable<Error> errors) { // The Google Ads customer ID for which the call is made. options.CustomerId = long.Parse("INSERT_CUSTOMER_ID_HERE"); // Either a payments account ID or a payments profile ID must be provided for // the example to run successfully. If both are provided, only the payments // account ID will be used. 
// See: https://developers.google.com/google-ads/api/docs/billing/billing-setups#creating_new_billing_setups // Provide an existing payments account ID to link to the new billing setup. // Must be formatted as "1234-5678-9012-3456". options.PaymentsAccountId = "INSERT_PAYMENTS_ACCOUNT_ID_HERE"; // Alternatively, provide a payments profile ID, which will be linked to a new // payments account and the new billing setup. Must be formatted as // "1234-5678-9012". options.PaymentsProfileId = "INSERT_PAYMENTS_PROFILE_ID_HERE"; return 0; }); AddBillingSetup codeExample = new AddBillingSetup(); Console.WriteLine(codeExample.Description); codeExample.Run(new GoogleAdsClient(), options.CustomerId, options.PaymentsAccountId, options.PaymentsProfileId); } /// <summary> /// Returns a description about the code example. /// </summary> public override string Description => "This example creates a billing setup for a customer. A billing setup is a link " + "between a payments account and a customer. The new billing setup can either reuse " + "an existing payments account, or create a new payments account with a given " + "payments profile.\n" + "Billing setups are applicable for clients on monthly invoicing only. See here for " + "details about applying for monthly invoicing: " + "https://support.google.com/google-ads/answer/2375377\n" + "In the case of consolidated billing, a payments account is linked to the " + "manager account and is linked to a customer account via a billing setup."; /// <summary> /// Runs the code example. Either a payments account ID or a payments profile ID must be /// provided for the example to run successfully. If both are provided, only the payments /// account ID will be used. /// </summary> /// <param name="client">The Google Ads client.</param> /// <param name="customerId">The Google Ads customer ID for which the call is made.</param> /// <param name="paymentsAccountId"> /// Optional payments account ID to attach to the new billing setup. Must be formatted as /// "1234-5678-9012-3456". /// </param> /// <param name="paymentsProfileId"> /// Optional payments profile ID to attach to a new payments account and to the new billing /// setup. Must be formatted as "1234-5678-9012". /// </param> public void Run(GoogleAdsClient client, long customerId, string paymentsAccountId, string paymentsProfileId) { // Gets the GoogleAdsServiceClient. GoogleAdsServiceClient googleAdsService = client.GetService( Services.V10.GoogleAdsService); // Gets the BillingSetupServiceClient. BillingSetupServiceClient billingSetupServiceClient = client.GetService(Services.V10.BillingSetupService); try { // Constructs a new billing setup. BillingSetup billingSetup = CreateBillingSetup(customerId, paymentsAccountId, paymentsProfileId); SetBillingSetupStartDateTime(googleAdsService, customerId, billingSetup); // Creates the billing setup operation. BillingSetupOperation operation = new BillingSetupOperation() { Create = billingSetup }; // Issues a mutate request to add the billing setup. 
MutateBillingSetupResponse billingResponse = billingSetupServiceClient.MutateBillingSetup(customerId.ToString(), operation); Console.WriteLine("Added new billing setup with resource name: " + $"{billingResponse.Result.ResourceName}"); } catch (GoogleAdsException e) { Console.WriteLine("Failure:"); Console.WriteLine($"Message: {e.Message}"); Console.WriteLine($"Failure: {e.Failure}"); Console.WriteLine($"Request ID: {e.RequestId}"); throw; } } /// <summary> /// Creates and returns a new Billing Setup instance with complete payment details. One of /// paymentsAccountId or paymentsProfileId must be provided. /// </summary> /// <param name="customerId">The Google Ads customer ID for which the call is made.</param> /// <param name="paymentsAccountId"> /// Optional payments account ID to attach to the new billing setup. Must be formatted as /// "1234-5678-9012-3456". /// </param> /// <param name="paymentsProfileId"> /// Optional payments profile ID to attach to a new payments account and to the new billing /// setup. Must be formatted as "1234-5678-9012". /// </param> /// <returns>A new BillingSetup instance with complete payment details.</returns> /// <exception cref="Exception"> /// Generic exception if no payment details have been provided. /// </exception> private BillingSetup CreateBillingSetup(long customerId, string paymentsAccountId, string paymentsProfileId) { BillingSetup billingSetup = new BillingSetup(); // Sets the appropriate payments account field. if (paymentsAccountId != null) { // If a payments account id has been provided, set PaymentsAccount to its resource // name. You can list available payments accounts via the PaymentsAccountService's // ListPaymentsAccounts method. billingSetup.PaymentsAccount = ResourceNames.PaymentsAccount(customerId, paymentsAccountId); } else if (paymentsProfileId != null) { // Otherwise, create a new payments account by setting the PaymentsAccountInfo // field. See https://support.google.com/google-ads/answer/7268503 for information // about payments profiles. billingSetup.PaymentsAccountInfo = new BillingSetup.Types.PaymentsAccountInfo() { PaymentsAccountName = "Payments Account #" + ExampleUtilities.GetRandomString(), PaymentsProfileId = paymentsProfileId }; } else { throw new Exception("No paymentsAccountId or paymentsProfileId provided."); } return billingSetup; } /// <summary> /// Sets the starting and ending date times for the new billing setup. Queries the /// customer's account to see if there are any approved billing setups. If there are any, /// the new billing setup starting date time is set to one day after the last. If not, the /// billing setup is set to start immediately. The ending date is set to one day after the /// starting date time. /// </summary> /// <param name="googleAdsService">The Google Ads service client.</param> /// <param name="customerId">The Google Ads customer ID for which the call is made.</param> /// <param name="billingSetup"> /// The instance of BillingSetup whose starting date time will be set. /// </param> private void SetBillingSetupStartDateTime(GoogleAdsServiceClient googleAdsService, long customerId, BillingSetup billingSetup) { // The query to search existing approved billing setups in the end date time descending // order. See GetBillingSetup.cs for a more detailed example of requesting billing setup // information. string query = @" SELECT billing_setup.end_date_time FROM billing_setup WHERE billing_setup.status = 'APPROVED' ORDER BY billing_setup.end_date_time DESC LIMIT 1"; // Issues a search request. 
PagedEnumerable<SearchGoogleAdsResponse, GoogleAdsRow> searchResponse = googleAdsService.Search(customerId.ToString(), query); if (searchResponse.Any()) { // Retrieves the ending date time of the last billing setup. string lastEndingDateTimeString = searchResponse.First().BillingSetup.EndDateTime; // A null ending date time indicates that the current billing setup is set to run // indefinitely. Billing setups cannot overlap, so throw an exception in this case. if (lastEndingDateTimeString == null) { throw new Exception("Cannot set starting and ending date times for " + "the new billing setup; the latest existing billing " + "setup is set to run indefinitely."); } DateTime lastEndingDateTime = DateTime.Parse(lastEndingDateTimeString); // Sets the new billing setup to start one day after the ending date time. billingSetup.StartDateTime = lastEndingDateTime.AddDays(1).ToString("yyyy-MM-dd"); // Sets the new billing setup to end one day after the starting date time. billingSetup.EndDateTime = lastEndingDateTime.AddDays(2).ToString("yyyy-MM-dd"); } else { // Otherwise, the only acceptable start time is TimeType.Now. billingSetup.StartTimeType = TimeType.Now; // Sets the new billing setup to end tomorrow. billingSetup.EndDateTime = DateTime.Today.AddDays(1).ToString("yyyy-MM-dd"); } } } }
// dnlib: See LICENSE.txt for more info using System; using System.Collections.Generic; using dnlib.IO; using dnlib.PE; namespace dnlib.DotNet.MD { /// <summary> /// Low level access to a .NET file's metadata /// </summary> public static class MetadataFactory { enum MetadataType { Unknown, Compressed, // #~ (normal) ENC, // #- (edit and continue) } internal static MetadataBase Load(string fileName, CLRRuntimeReaderKind runtime) { IPEImage peImage = null; try { return Load(peImage = new PEImage(fileName), runtime); } catch { if (peImage is not null) peImage.Dispose(); throw; } } internal static MetadataBase Load(byte[] data, CLRRuntimeReaderKind runtime) { IPEImage peImage = null; try { return Load(peImage = new PEImage(data), runtime); } catch { if (peImage is not null) peImage.Dispose(); throw; } } internal static MetadataBase Load(IntPtr addr, CLRRuntimeReaderKind runtime) { IPEImage peImage = null; // We don't know what layout it is. Memory is more common so try that first. try { return Load(peImage = new PEImage(addr, ImageLayout.Memory, true), runtime); } catch { if (peImage is not null) peImage.Dispose(); peImage = null; } try { return Load(peImage = new PEImage(addr, ImageLayout.File, true), runtime); } catch { if (peImage is not null) peImage.Dispose(); throw; } } internal static MetadataBase Load(IntPtr addr, ImageLayout imageLayout, CLRRuntimeReaderKind runtime) { IPEImage peImage = null; try { return Load(peImage = new PEImage(addr, imageLayout, true), runtime); } catch { if (peImage is not null) peImage.Dispose(); throw; } } internal static MetadataBase Load(IPEImage peImage, CLRRuntimeReaderKind runtime) => Create(peImage, runtime, true); /// <summary> /// Create a <see cref="Metadata"/> instance /// </summary> /// <param name="peImage">The PE image</param> /// <returns>A new <see cref="Metadata"/> instance</returns> public static Metadata CreateMetadata(IPEImage peImage) => CreateMetadata(peImage, CLRRuntimeReaderKind.CLR); /// <summary> /// Create a <see cref="Metadata"/> instance /// </summary> /// <param name="peImage">The PE image</param> /// <param name="runtime">Runtime reader kind</param> /// <returns>A new <see cref="Metadata"/> instance</returns> public static Metadata CreateMetadata(IPEImage peImage, CLRRuntimeReaderKind runtime) => Create(peImage, runtime, true); /// <summary> /// Create a <see cref="Metadata"/> instance /// </summary> /// <param name="peImage">The PE image</param> /// <param name="verify"><c>true</c> if we should verify that it's a .NET PE file</param> /// <returns>A new <see cref="Metadata"/> instance</returns> public static Metadata CreateMetadata(IPEImage peImage, bool verify) => CreateMetadata(peImage, CLRRuntimeReaderKind.CLR, verify); /// <summary> /// Create a <see cref="Metadata"/> instance /// </summary> /// <param name="peImage">The PE image</param> /// <param name="runtime">Runtime reader kind</param> /// <param name="verify"><c>true</c> if we should verify that it's a .NET PE file</param> /// <returns>A new <see cref="Metadata"/> instance</returns> public static Metadata CreateMetadata(IPEImage peImage, CLRRuntimeReaderKind runtime, bool verify) => Create(peImage, runtime, verify); /// <summary> /// Create a <see cref="MetadataBase"/> instance /// </summary> /// <param name="peImage">The PE image</param> /// <param name="runtime">Runtime reader kind</param> /// <param name="verify"><c>true</c> if we should verify that it's a .NET PE file</param> /// <returns>A new <see cref="MetadataBase"/> instance</returns> static MetadataBase 
Create(IPEImage peImage, CLRRuntimeReaderKind runtime, bool verify) { MetadataBase md = null; try { var dotNetDir = peImage.ImageNTHeaders.OptionalHeader.DataDirectories[14]; // Mono doesn't check that the Size field is >= 0x48 if (dotNetDir.VirtualAddress == 0) throw new BadImageFormatException(".NET data directory RVA is 0"); var cor20HeaderReader = peImage.CreateReader(dotNetDir.VirtualAddress, 0x48); var cor20Header = new ImageCor20Header(ref cor20HeaderReader, verify && runtime == CLRRuntimeReaderKind.CLR); if (cor20Header.Metadata.VirtualAddress == 0) throw new BadImageFormatException(".NET metadata RVA is 0"); var mdRva = cor20Header.Metadata.VirtualAddress; // Don't use the size field, Mono ignores it. Create a reader that can read to EOF. var mdHeaderReader = peImage.CreateReader(mdRva); var mdHeader = new MetadataHeader(ref mdHeaderReader, runtime, verify); if (verify) { foreach (var sh in mdHeader.StreamHeaders) { if ((ulong)sh.Offset + sh.StreamSize > mdHeaderReader.EndOffset) throw new BadImageFormatException("Invalid stream header"); } } md = GetMetadataType(mdHeader.StreamHeaders, runtime) switch { MetadataType.Compressed => new CompressedMetadata(peImage, cor20Header, mdHeader, runtime), MetadataType.ENC => new ENCMetadata(peImage, cor20Header, mdHeader, runtime), _ => throw new BadImageFormatException("No #~ or #- stream found"), }; md.Initialize(null); return md; } catch { if (md is not null) md.Dispose(); throw; } } /// <summary> /// Create a standalone portable PDB <see cref="MetadataBase"/> instance /// </summary> /// <param name="mdReaderFactory">Metadata stream</param> /// <param name="verify"><c>true</c> if we should verify that it's a .NET PE file</param> /// <returns>A new <see cref="MetadataBase"/> instance</returns> internal static MetadataBase CreateStandalonePortablePDB(DataReaderFactory mdReaderFactory, bool verify) { const CLRRuntimeReaderKind runtime = CLRRuntimeReaderKind.CLR; MetadataBase md = null; try { var reader = mdReaderFactory.CreateReader(); var mdHeader = new MetadataHeader(ref reader, runtime, verify); if (verify) { foreach (var sh in mdHeader.StreamHeaders) { if (sh.Offset + sh.StreamSize < sh.Offset || sh.Offset + sh.StreamSize > reader.Length) throw new BadImageFormatException("Invalid stream header"); } } md = GetMetadataType(mdHeader.StreamHeaders, runtime) switch { MetadataType.Compressed => new CompressedMetadata(mdHeader, true, runtime), MetadataType.ENC => new ENCMetadata(mdHeader, true, runtime), _ => throw new BadImageFormatException("No #~ or #- stream found"), }; md.Initialize(mdReaderFactory); return md; } catch { md?.Dispose(); throw; } } static MetadataType GetMetadataType(IList<StreamHeader> streamHeaders, CLRRuntimeReaderKind runtime) { MetadataType? mdType = null; if (runtime == CLRRuntimeReaderKind.CLR) { foreach (var sh in streamHeaders) { if (mdType is null) { if (sh.Name == "#~") mdType = MetadataType.Compressed; else if (sh.Name == "#-") mdType = MetadataType.ENC; } if (sh.Name == "#Schema") mdType = MetadataType.ENC; } } else if (runtime == CLRRuntimeReaderKind.Mono) { foreach (var sh in streamHeaders) { if (sh.Name == "#~") mdType = MetadataType.Compressed; else if (sh.Name == "#-") { mdType = MetadataType.ENC; break; } } } else throw new ArgumentOutOfRangeException(nameof(runtime)); if (mdType is null) return MetadataType.Unknown; return mdType.Value; } } }
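// ---------------------------------------------------------------------------
// Usage sketch (added for illustration; not part of dnlib). A minimal example of
// the public CreateMetadata entry point above, assuming a managed assembly exists
// at the hypothetical path "sample.dll" and that the returned Metadata exposes the
// parsed MetadataHeader and its StreamHeaders as used in the factory code above.
using System;
using dnlib.DotNet.MD;
using dnlib.PE;

static class MetadataFactoryUsageSketch
{
    static void Demo()
    {
        using var peImage = new PEImage("sample.dll");                 // hypothetical input file
        using var metadata = MetadataFactory.CreateMetadata(peImage, verify: true);
        foreach (var streamHeader in metadata.MetadataHeader.StreamHeaders)
            Console.WriteLine(streamHeader.Name);                      // e.g. "#~", "#Strings", "#US", "#GUID", "#Blob"
    }
}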
using System; using OpenCvSharp.Internal; namespace OpenCvSharp { // ReSharper disable InconsistentNaming /// <summary> /// K nearest neighbours algorithm /// </summary> public class BackgroundSubtractorKNN : BackgroundSubtractor { /// <summary> /// cv::Ptr&lt;T&gt; /// </summary> private Ptr? objectPtr; #region Init & Disposal /// <summary> /// Creates KNN Background Subtractor /// </summary> /// <param name="history">Length of the history.</param> /// <param name="dist2Threshold">Threshold on the squared distance between the pixel and the sample to decide /// whether a pixel is close to that sample. This parameter does not affect the background update.</param> /// <param name="detectShadows">If true, the algorithm will detect shadows and mark them. It decreases the /// speed a bit, so if you do not need this feature, set the parameter to false.</param> /// <returns></returns> public static BackgroundSubtractorKNN Create( int history = 500, double dist2Threshold = 400.0, bool detectShadows = true) { NativeMethods.HandleException( NativeMethods.video_createBackgroundSubtractorKNN( history, dist2Threshold, detectShadows ? 1 : 0, out var ptr)); return new BackgroundSubtractorKNN(ptr); } internal BackgroundSubtractorKNN(IntPtr ptr) { objectPtr = new Ptr(ptr); this.ptr = objectPtr.Get(); } /// <summary> /// Releases managed resources /// </summary> protected override void DisposeManaged() { objectPtr?.Dispose(); objectPtr = null; base.DisposeManaged(); } #endregion #region Properties /// <summary> /// Gets or sets the number of last frames that affect the background model. /// </summary> public int History { get { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_getHistory(ptr, out var ret)); GC.KeepAlive(this); return ret; } set { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_setHistory(ptr, value)); GC.KeepAlive(this); } } /// <summary> /// Gets or sets the number of data samples in the background model /// </summary> public int NSamples { get { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_getNSamples(ptr, out var ret)); GC.KeepAlive(this); return ret; } set { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_setNSamples(ptr, value)); GC.KeepAlive(this); } } /// <summary> /// Gets or sets the threshold on the squared distance between the pixel and the sample. /// The threshold on the squared distance between the pixel and the sample to decide whether a pixel is close to a data sample. /// </summary> public double Dist2Threshold { get { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_getDist2Threshold(ptr, out var ret)); GC.KeepAlive(this); return ret; } set { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_setDist2Threshold(ptr, value)); GC.KeepAlive(this); } } /// <summary> /// Returns the number of neighbours, the k in the kNN. /// K is the number of samples that need to be within dist2Threshold in order to decide that the /// pixel matches the kNN background model.
/// </summary> public int KNNSamples { get { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_getkNNSamples(ptr, out var ret)); GC.KeepAlive(this); return ret; } set { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_setkNNSamples(ptr, value)); GC.KeepAlive(this); } } /// <summary> /// Returns the shadow detection flag. /// If true, the algorithm detects shadows and marks them. See createBackgroundSubtractorKNN for details. /// </summary> public bool DetectShadows { get { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_getDetectShadows(ptr, out var ret)); GC.KeepAlive(this); return ret != 0; } set { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_setDetectShadows(ptr, value ? 1 : 0)); GC.KeepAlive(this); } } /// <summary> /// Gets or sets the shadow value. /// Shadow value is the value used to mark shadows in the foreground mask. Default value is 127. /// Value 0 in the mask always means background, 255 means foreground. /// </summary> public int ShadowValue { get { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_getShadowValue(ptr, out var ret)); GC.KeepAlive(this); return ret; } set { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_setShadowValue(ptr, value)); GC.KeepAlive(this); } } /// <summary> /// Gets or sets the shadow threshold. /// A shadow is detected if pixel is a darker version of the background. The shadow threshold (Tau in /// the paper) is a threshold defining how much darker the shadow can be. Tau= 0.5 means that if a pixel /// is more than twice darker then it is not shadow. See Prati, Mikic, Trivedi and Cucchiara, /// *Detecting Moving Shadows...*, IEEE PAMI,2003. /// </summary> public double ShadowThreshold { get { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_getShadowThreshold(ptr, out var ret)); GC.KeepAlive(this); return ret; } set { ThrowIfDisposed(); NativeMethods.HandleException( NativeMethods.video_BackgroundSubtractorKNN_setShadowThreshold(ptr, value)); GC.KeepAlive(this); } } #endregion internal class Ptr : OpenCvSharp.Ptr { public Ptr(IntPtr ptr) : base(ptr) { } public override IntPtr Get() { NativeMethods.HandleException( NativeMethods.video_Ptr_BackgroundSubtractorKNN_get(ptr, out var ret)); GC.KeepAlive(this); return ret; } protected override void DisposeUnmanaged() { NativeMethods.HandleException( NativeMethods.video_Ptr_BackgroundSubtractorKNN_delete(ptr)); base.DisposeUnmanaged(); } } } }
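// ---------------------------------------------------------------------------
// Usage sketch (added for illustration; not part of OpenCvSharp). A minimal,
// hedged example: create the KNN subtractor with the factory above and apply it
// to frames from a camera (device index 0 is assumed) to obtain a foreground mask.
using OpenCvSharp;

static class BackgroundSubtractorKnnUsageSketch
{
    static void Demo()
    {
        using var subtractor = BackgroundSubtractorKNN.Create(history: 500, dist2Threshold: 400.0, detectShadows: true);
        using var capture = new VideoCapture(0);           // hypothetical camera index
        using var frame = new Mat();
        using var foregroundMask = new Mat();

        while (capture.Read(frame) && !frame.Empty())
        {
            // Apply() comes from the BackgroundSubtractor base class; the default
            // learning rate (-1) lets the algorithm choose the rate automatically.
            subtractor.Apply(frame, foregroundMask);
            Cv2.ImShow("foreground", foregroundMask);
            if (Cv2.WaitKey(1) == 27)                      // Esc to stop
                break;
        }
    }
}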
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; using System.IO; using System.IO.Ports; using System.Linq; using System.Management; using System.Net; using System.Threading; using System.Threading.Tasks; using GalaSoft.MvvmLight; using GalaSoft.MvvmLight.Command; using GalaSoft.MvvmLight.Views; using MarkdownDeep; using MdkControllerUpdate.Extensions; using MdkControllerUpdate.Messages; using MdkControllerUpdate.Model; using Octokit; using Octokit.Internal; using Release = MdkControllerUpdate.Model.Release; namespace MdkControllerUpdate.ViewModel { class MainViewModel : ViewModelBase { private const string RepositoryOwner = "milindur"; private const string RepositoryName = "MdkController"; private const string GitHubToken = "1ee8bba55fd719d69ba64751c7e88258cf5044cd"; private readonly IDialogService _dialogService; public MainViewModel(IDialogService dialogService) { _dialogService = dialogService; } bool _isBusy; public bool IsBusy { get { return _isBusy; } private set { _isBusy = value; RaisePropertyChanged(() => IsBusy); RefreshPortsCommand.RaiseCanExecuteChanged(); RefreshReleasesCommand.RaiseCanExecuteChanged(); UpdateCommand.RaiseCanExecuteChanged(); UpdateFromFileCommand.RaiseCanExecuteChanged(); } } public string Title => $"MDK Controller Update v{Version}"; public Version Version => typeof(App).Assembly.GetName().Version; public ObservableCollection<PortName> Ports { get; } = new ObservableCollection<PortName>(); public ObservableCollection<Release> Releases { get; } = new ObservableCollection<Release>(); private PortName _selectedComPort; public PortName SelectedComPort { get { return _selectedComPort; } set { _selectedComPort = value; RaisePropertyChanged(() => SelectedComPort); UpdateCommand.RaiseCanExecuteChanged(); UpdateFromFileCommand.RaiseCanExecuteChanged(); } } private Release _selectedRelease; public Release SelectedRelease { get { return _selectedRelease; } set { _selectedRelease = value; RaisePropertyChanged(() => SelectedRelease); RaisePropertyChanged(() => SelectedReleaseDescriptionAsHtml); UpdateCommand.RaiseCanExecuteChanged(); UpdateFromFileCommand.RaiseCanExecuteChanged(); } } public string SelectedReleaseDescriptionAsHtml { get { if (_selectedRelease == null) return null; return new Markdown().Transform(_selectedRelease.Description); } } private RelayCommand _refreshPortsCommand; public RelayCommand RefreshPortsCommand { get { return _refreshPortsCommand ?? (_refreshPortsCommand = new RelayCommand(() => { IsBusy = true; using (var searcher = new ManagementObjectSearcher("SELECT * FROM WIN32_SerialPort")) { var portNames = SerialPort.GetPortNames(); var portObjects = searcher.Get().Cast<ManagementBaseObject>().ToList(); Ports.Clear(); portNames.Join(portObjects, o => o, i => i["DeviceID"].ToString(), (o, i) => new PortName { ComPort = o, Name = i["Caption"].ToString(), PnpDeviceId = i["PNPDeviceID"].ToString() }) .Where(p => p.PnpDeviceId.Contains("USB\\VID_2341&PID_003D") || p.PnpDeviceId.Contains("USB\\VID_2A03&PID_003D")) .ToList() .ForEach(Ports.Add); } SelectedComPort = Ports.FirstOrDefault(); IsBusy = false; }, () => !IsBusy)); } } private RelayCommand _refreshReleasesCommand; public RelayCommand RefreshReleasesCommand { get { return _refreshReleasesCommand ?? 
(_refreshReleasesCommand = new RelayCommand(async () => { IsBusy = true; try { var gitHub = new GitHubClient(new ProductHeaderValue("MdkControllerUpdate"), new InMemoryCredentialStore(new Credentials(GitHubToken))); var releases = await gitHub.Release.GetAll(RepositoryOwner, RepositoryName); var results = new List<Release>(); foreach (var release in releases) { var assets = await gitHub.Release.GetAllAssets(RepositoryOwner, RepositoryName, release.Id); var firmwareAsset = assets.SingleOrDefault(ra => ra.Name.ToLowerInvariant().EndsWith(".bin")); //if (firmwareAsset == null) continue; results.Add(new Release { Label = release.Name, Description = release.Body, CreatedOn = release.CreatedAt, FirmwareUri = firmwareAsset?.BrowserDownloadUrl }); } Releases.Clear(); results.ForEach(Releases.Add); SelectedRelease = Releases.FirstOrDefault(); } catch (Exception ex) { await _dialogService.ShowMessage($"Error while receiving available releases.\r\nPlease make sure that you are connected to the Internet.\r\n\r\n{ex.Message}", "Releases"); } finally { IsBusy = false; } }, () => !IsBusy)); } } private RelayCommand _updateCommand; public RelayCommand UpdateCommand { get { return _updateCommand ?? (_updateCommand = new RelayCommand(async () => { IsBusy = true; var firmwareFilePath = Path.GetTempFileName(); await new WebClient().DownloadFileTaskAsync(SelectedRelease.FirmwareUri, firmwareFilePath); await UpdateFirmware(firmwareFilePath); }, () => SelectedComPort != null && SelectedRelease?.FirmwareUri != null && !IsBusy)); } } private RelayCommand _updateFromFileCommand; public RelayCommand UpdateFromFileCommand { get { return _updateFromFileCommand ?? (_updateFromFileCommand = new RelayCommand(async () => { IsBusy = true; var fileSelected = new TaskCompletionSource<string>(); var fileSelectedTask = fileSelected.Task; MessengerInstance.Send(new FileOpenDialogMessage(fileName => { fileSelected.SetResult(fileName); }, () => { fileSelected.SetCanceled(); })); try { var firmwareFilePath = await fileSelectedTask; await UpdateFirmware(firmwareFilePath); } catch (AggregateException ae) { if (ae.GetBaseException() is TaskCanceledException) return; await _dialogService.ShowMessage("Firmware could not be updated.", "Update"); } }, () => SelectedComPort != null && !IsBusy)); } } private async Task UpdateFirmware(string firmwareFilePath) { using (var sp = new SerialPort(SelectedComPort.ComPort, 1200, Parity.None, 8, StopBits.One)) { sp.Open(); Thread.Sleep(100); sp.Close(); } var bossaFilePath = Path.Combine(Path.GetTempPath(), @"bossac.exe"); File.WriteAllBytes(bossaFilePath, Properties.Resources.BossaCmd); var flashProcess = new Process { StartInfo = new ProcessStartInfo( bossaFilePath, $"--port={SelectedComPort.ComPort} -U false -e -w -v -b \"{firmwareFilePath}\" -R") }; flashProcess.Start(); await flashProcess.WaitForExitAsync(); var exitCode = flashProcess.ExitCode; if (exitCode != 0) { await _dialogService.ShowMessage("Firmware could not be updated.", "Update"); } else { await _dialogService.ShowMessage("Firmware update was successful.", "Update"); } IsBusy = false; } } }
//------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. //------------------------------------------------------------ namespace System.ServiceModel.Security { using System.IdentityModel; using System.IdentityModel.Selectors; using System.IdentityModel.Tokens; using System.Runtime; using System.Runtime.CompilerServices; using System.Xml; using DictionaryManager = System.IdentityModel.DictionaryManager; using ISecurityElement = System.IdentityModel.ISecurityElement; [TypeForwardedFrom("System.ServiceModel, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] abstract class EncryptedType : ISecurityElement { internal static readonly XmlDictionaryString NamespaceUri = XD.XmlEncryptionDictionary.Namespace; internal static readonly XmlDictionaryString EncodingAttribute = XD.XmlEncryptionDictionary.Encoding; internal static readonly XmlDictionaryString MimeTypeAttribute = XD.XmlEncryptionDictionary.MimeType; internal static readonly XmlDictionaryString TypeAttribute = XD.XmlEncryptionDictionary.Type; internal static readonly XmlDictionaryString CipherDataElementName = XD.XmlEncryptionDictionary.CipherData; internal static readonly XmlDictionaryString CipherValueElementName = XD.XmlEncryptionDictionary.CipherValue; string encoding; EncryptionMethodElement encryptionMethod; string id; string wsuId; SecurityKeyIdentifier keyIdentifier; string mimeType; EncryptionState state; string type; SecurityTokenSerializer tokenSerializer; bool shouldReadXmlReferenceKeyInfoClause; protected EncryptedType() { this.encryptionMethod.Init(); this.state = EncryptionState.New; this.tokenSerializer = new KeyInfoSerializer(false); } public string Encoding { get { return this.encoding; } set { this.encoding = value; } } public string EncryptionMethod { get { return this.encryptionMethod.algorithm; } set { this.encryptionMethod.algorithm = value; } } public XmlDictionaryString EncryptionMethodDictionaryString { get { return this.encryptionMethod.algorithmDictionaryString; } set { this.encryptionMethod.algorithmDictionaryString = value; } } public bool HasId { get { return true; } } public string Id { get { return this.id; } set { this.id = value; } } // This knob is set to true on the client side. When it is true and the default serializers on the client side fail // to read the KeyInfo clause from the incoming response message from a service, the client should // try to read the KeyInfo clause as a GenericXmlSecurityKeyIdentifierClause before throwing. public bool ShouldReadXmlReferenceKeyInfoClause { get { return this.shouldReadXmlReferenceKeyInfoClause; } set { this.shouldReadXmlReferenceKeyInfoClause = value; } } public string WsuId { get { return this.wsuId; } set { this.wsuId = value; } } public SecurityKeyIdentifier KeyIdentifier { get { return this.keyIdentifier; } set { this.keyIdentifier = value; } } public string MimeType { get { return this.mimeType; } set { this.mimeType = value; } } public string Type { get { return this.type; } set { this.type = value; } } protected abstract XmlDictionaryString OpeningElementName { get; } protected EncryptionState State { get { return this.state; } set { this.state = value; } } public SecurityTokenSerializer SecurityTokenSerializer { get { return this.tokenSerializer; } set { this.tokenSerializer = value ??
new KeyInfoSerializer(false); } } protected abstract void ForceEncryption(); protected virtual void ReadAdditionalAttributes(XmlDictionaryReader reader) { } protected virtual void ReadAdditionalElements(XmlDictionaryReader reader) { } protected abstract void ReadCipherData(XmlDictionaryReader reader); protected abstract void ReadCipherData(XmlDictionaryReader reader, long maxBufferSize); public void ReadFrom(XmlReader reader) { ReadFrom(reader, 0); } public void ReadFrom(XmlDictionaryReader reader) { ReadFrom(reader, 0); } public void ReadFrom(XmlReader reader, long maxBufferSize) { ReadFrom(XmlDictionaryReader.CreateDictionaryReader(reader), maxBufferSize); } public void ReadFrom(XmlDictionaryReader reader, long maxBufferSize) { ValidateReadState(); reader.MoveToStartElement(OpeningElementName, NamespaceUri); this.encoding = reader.GetAttribute(EncodingAttribute, null); this.id = reader.GetAttribute(XD.XmlEncryptionDictionary.Id, null) ?? SecurityUniqueId.Create().Value; this.wsuId = reader.GetAttribute(XD.XmlEncryptionDictionary.Id, XD.UtilityDictionary.Namespace) ?? SecurityUniqueId.Create().Value; this.mimeType = reader.GetAttribute(MimeTypeAttribute, null); this.type = reader.GetAttribute(TypeAttribute, null); ReadAdditionalAttributes(reader); reader.Read(); if (reader.IsStartElement(EncryptionMethodElement.ElementName, NamespaceUri)) { this.encryptionMethod.ReadFrom(reader); } if (this.tokenSerializer.CanReadKeyIdentifier(reader)) { XmlElement xml = null; XmlDictionaryReader localReader; if (this.ShouldReadXmlReferenceKeyInfoClause) { // We create the dom only when needed to not affect perf. XmlDocument doc = new XmlDocument(); xml = (doc.ReadNode(reader) as XmlElement); localReader = XmlDictionaryReader.CreateDictionaryReader(new XmlNodeReader(xml)); } else { localReader = reader; } try { this.KeyIdentifier = this.tokenSerializer.ReadKeyIdentifier(localReader); } catch (Exception e) { // In case when the issued token ( custom token) is used as an initiator token; we will fail // to read the keyIdentifierClause using the plugged in default serializer. So We need to try to read it as an XmlReferencekeyIdentifierClause // if it is the client side. 
if (Fx.IsFatal(e) || !this.ShouldReadXmlReferenceKeyInfoClause) { throw; } this.keyIdentifier = ReadGenericXmlSecurityKeyIdentifier( XmlDictionaryReader.CreateDictionaryReader( new XmlNodeReader(xml)), e); } } reader.ReadStartElement(CipherDataElementName, EncryptedType.NamespaceUri); reader.ReadStartElement(CipherValueElementName, EncryptedType.NamespaceUri); if (maxBufferSize == 0) ReadCipherData(reader); else ReadCipherData(reader, maxBufferSize); reader.ReadEndElement(); // CipherValue reader.ReadEndElement(); // CipherData ReadAdditionalElements(reader); reader.ReadEndElement(); // OpeningElementName this.State = EncryptionState.Read; } private SecurityKeyIdentifier ReadGenericXmlSecurityKeyIdentifier(XmlDictionaryReader localReader, Exception previousException) { if (!localReader.IsStartElement(XD.XmlSignatureDictionary.KeyInfo, XD.XmlSignatureDictionary.Namespace)) { return null; } localReader.ReadStartElement(XD.XmlSignatureDictionary.KeyInfo, XD.XmlSignatureDictionary.Namespace); SecurityKeyIdentifier keyIdentifier = new SecurityKeyIdentifier(); if (localReader.IsStartElement()) { SecurityKeyIdentifierClause clause = null; string strId = localReader.GetAttribute(XD.UtilityDictionary.IdAttribute, XD.UtilityDictionary.Namespace); XmlDocument doc = new XmlDocument(); XmlElement keyIdentifierReferenceXml = (doc.ReadNode(localReader) as XmlElement); clause = new GenericXmlSecurityKeyIdentifierClause(keyIdentifierReferenceXml); if (!string.IsNullOrEmpty(strId)) clause.Id = strId; keyIdentifier.Add(clause); } if (keyIdentifier.Count == 0) throw previousException; localReader.ReadEndElement(); return keyIdentifier; } protected virtual void WriteAdditionalAttributes(XmlDictionaryWriter writer, DictionaryManager dictionaryManager) { } protected virtual void WriteAdditionalElements(XmlDictionaryWriter writer, DictionaryManager dictionaryManager) { } protected abstract void WriteCipherData(XmlDictionaryWriter writer); public void WriteTo(XmlDictionaryWriter writer, DictionaryManager dictionaryManager) { if (writer == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("writer"); } ValidateWriteState(); writer.WriteStartElement(XmlEncryptionStrings.Prefix, this.OpeningElementName, NamespaceUri); if (this.id != null && this.id.Length != 0) { writer.WriteAttributeString(XD.XmlEncryptionDictionary.Id, null, this.Id); } if (this.type != null) { writer.WriteAttributeString(TypeAttribute, null, this.Type); } if (this.mimeType != null) { writer.WriteAttributeString(MimeTypeAttribute, null, this.MimeType); } if (this.encoding != null) { writer.WriteAttributeString(EncodingAttribute, null, this.Encoding); } WriteAdditionalAttributes(writer, dictionaryManager); if (this.encryptionMethod.algorithm != null) { this.encryptionMethod.WriteTo(writer); } if (this.KeyIdentifier != null) { this.tokenSerializer.WriteKeyIdentifier(writer, this.KeyIdentifier); } writer.WriteStartElement(CipherDataElementName, NamespaceUri); writer.WriteStartElement(CipherValueElementName, NamespaceUri); WriteCipherData(writer); writer.WriteEndElement(); // CipherValue writer.WriteEndElement(); // CipherData WriteAdditionalElements(writer, dictionaryManager); writer.WriteEndElement(); // OpeningElementName } void ValidateReadState() { if (this.State != EncryptionState.New) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SecurityMessageSerializationException(SR.GetString(SR.BadEncryptionState))); } } void ValidateWriteState() { if (this.State == EncryptionState.EncryptionSetup) { 
ForceEncryption(); } else if (this.State == EncryptionState.New) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SecurityMessageSerializationException(SR.GetString(SR.BadEncryptionState))); } } protected enum EncryptionState { New, Read, DecryptionSetup, Decrypted, EncryptionSetup, Encrypted } struct EncryptionMethodElement { internal string algorithm; internal XmlDictionaryString algorithmDictionaryString; internal static readonly XmlDictionaryString ElementName = XD.XmlEncryptionDictionary.EncryptionMethod; public void Init() { this.algorithm = null; } public void ReadFrom(XmlDictionaryReader reader) { reader.MoveToStartElement(ElementName, XD.XmlEncryptionDictionary.Namespace); bool isEmptyElement = reader.IsEmptyElement; this.algorithm = reader.GetAttribute(XD.XmlSignatureDictionary.Algorithm, null); if (this.algorithm == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SecurityMessageSerializationException( SR.GetString(SR.RequiredAttributeMissing, XD.XmlSignatureDictionary.Algorithm.Value, ElementName.Value))); } reader.Read(); if (!isEmptyElement) { while (reader.IsStartElement()) { reader.Skip(); } reader.ReadEndElement(); } } public void WriteTo(XmlDictionaryWriter writer) { writer.WriteStartElement(XmlEncryptionStrings.Prefix, ElementName, XD.XmlEncryptionDictionary.Namespace); if (this.algorithmDictionaryString != null) { writer.WriteStartAttribute(XD.XmlSignatureDictionary.Algorithm, null); writer.WriteString(this.algorithmDictionaryString); writer.WriteEndAttribute(); } else { writer.WriteAttributeString(XD.XmlSignatureDictionary.Algorithm, null, this.algorithm); } if (this.algorithm == XD.SecurityAlgorithmDictionary.RsaOaepKeyWrap.Value) { writer.WriteStartElement(XmlSignatureStrings.Prefix, XD.XmlSignatureDictionary.DigestMethod, XD.XmlSignatureDictionary.Namespace); writer.WriteStartAttribute(XD.XmlSignatureDictionary.Algorithm, null); writer.WriteString(XD.SecurityAlgorithmDictionary.Sha1Digest); writer.WriteEndAttribute(); writer.WriteEndElement(); } writer.WriteEndElement(); // EncryptionMethod } } } }
// // Copyright 2011-2014, Xamarin Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using System.Text; using Android.Provider; using Uri = Android.Net.Uri; namespace Xamarin { internal class ContentQueryTranslator : ExpressionVisitor { private readonly List<String> arguments = new List<String>(); private readonly IQueryProvider provider; private readonly StringBuilder queryBuilder = new StringBuilder(); private readonly ITableFinder tableFinder; private Boolean fallback = false; private List<ContentResolverColumnMapping> projections; private StringBuilder sortBuilder; public ContentQueryTranslator( IQueryProvider provider, ITableFinder tableFinder ) { this.provider = provider; this.tableFinder = tableFinder; Skip = -1; Take = -1; } public String[] ClauseParameters { get { return (arguments.Count > 0) ? arguments.ToArray() : null; } } public Boolean IsAny { get; private set; } public Boolean IsCount { get; private set; } public IEnumerable<ContentResolverColumnMapping> Projections { get { return projections; } } public String QueryString { get { return (queryBuilder.Length > 0) ? queryBuilder.ToString() : null; } } public Type ReturnType { get; private set; } public Int32 Skip { get; private set; } public String SortString { get { return (sortBuilder != null) ? 
sortBuilder.ToString() : null; } } public Uri Table { get; private set; } public Int32 Take { get; private set; } public Expression Translate( Expression expression ) { Expression expr = Visit( expression ); if(Table == null) { Table = tableFinder.DefaultTable; } return expr; } protected override Expression VisitMethodCall( MethodCallExpression methodCall ) { if(methodCall.Arguments.Count == 0 || !(methodCall.Arguments[0] is ConstantExpression || methodCall.Arguments[0] is MethodCallExpression)) { fallback = true; return methodCall; } Expression expression = base.VisitMethodCall( methodCall ); methodCall = expression as MethodCallExpression; if(methodCall == null) { fallback = true; return expression; } if(!fallback) { if(methodCall.Method.Name == "Where") { expression = VisitWhere( methodCall ); } else if(methodCall.Method.Name == "Any") { expression = VisitAny( methodCall ); } else if(methodCall.Method.Name == "Select") { expression = VisitSelect( methodCall ); } else if(methodCall.Method.Name == "SelectMany") { expression = VisitSelectMany( methodCall ); } else if(methodCall.Method.Name == "OrderBy" || methodCall.Method.Name == "OrderByDescending") { expression = VisitOrder( methodCall ); } else if(methodCall.Method.Name == "Skip") { expression = VisitSkip( methodCall ); } else if(methodCall.Method.Name == "Take") { expression = VisitTake( methodCall ); } else if(methodCall.Method.Name == "Count") { expression = VisitCount( methodCall ); } else if(methodCall.Method.Name == "First" || methodCall.Method.Name == "FirstOrDefault") { expression = VisitFirst( methodCall ); } else if(methodCall.Method.Name == "Single" || methodCall.Method.Name == "SingleOrDefault") { expression = VisitSingle( methodCall ); } } return expression; } private MemberExpression FindMemberExpression( Expression expression ) { UnaryExpression ue = expression as UnaryExpression; if(ue != null) { expression = ue.Operand; } LambdaExpression le = expression as LambdaExpression; if(le != null) { expression = le.Body; } MemberExpression me = expression as MemberExpression; if(me != null && tableFinder.IsSupportedType( me.Member.DeclaringType )) { return me; } BinaryExpression be = expression as BinaryExpression; if(be != null) { me = be.Left as MemberExpression; if(me != null && tableFinder.IsSupportedType( me.Member.DeclaringType )) { return me; } me = be.Right as MemberExpression; if(me != null && tableFinder.IsSupportedType( me.Member.DeclaringType )) { return me; } } return null; } private Type GetExpressionArgumentType( Expression expression ) { switch(expression.NodeType) { case ExpressionType.Constant: return ((ConstantExpression)expression).Value.GetType(); } return null; } private Boolean TryGetTable( List<MemberExpression> memberExpressions ) { if(memberExpressions.Count == 0) { fallback = true; return false; } Uri existingTable = Table; TableFindResult presult = null; foreach(MemberExpression me in memberExpressions) { TableFindResult result = tableFinder.Find( me ); if(result.Table == null) { fallback = true; return false; } if(existingTable == null) { existingTable = result.Table; presult = result; } else if(existingTable != result.Table) { fallback = true; return false; } } if(presult == null) { fallback = true; return false; } Table = presult.Table; if(presult.MimeType != null) { if(queryBuilder.Length > 0) { queryBuilder.Append( " AND " ); } queryBuilder.Append( String.Format( "({0} = ?)", ContactsContract.DataColumns.Mimetype ) ); } arguments.Add( presult.MimeType ); return true; } private Boolean 
TryGetTable( MemberExpression me ) { if(me == null) { fallback = true; return false; } TableFindResult result = tableFinder.Find( me ); if(result.MimeType != null) { if(queryBuilder.Length > 0) { queryBuilder.Append( " AND " ); } queryBuilder.Append( String.Format( "({0} = ?)", ContactsContract.DataColumns.Mimetype ) ); } arguments.Add( result.MimeType ); if(Table == null) { Table = result.Table; } else if(Table != result.Table) { fallback = true; return false; } return true; } private Expression VisitAny( MethodCallExpression methodCall ) { if(methodCall.Arguments.Count > 1) { VisitWhere( methodCall ); if(fallback) { return methodCall; } } IsAny = true; return methodCall.Arguments[0]; } private Expression VisitCount( MethodCallExpression methodCall ) { if(methodCall.Arguments.Count > 1) { VisitWhere( methodCall ); if(fallback) { return methodCall; } } IsCount = true; return methodCall.Arguments[0]; } private Expression VisitFirst( MethodCallExpression methodCall ) { if(methodCall.Arguments.Count > 1) { VisitWhere( methodCall ); if(fallback) { return methodCall; } } Take = 1; return methodCall; } private Expression VisitOrder( MethodCallExpression methodCall ) { MemberExpression me = FindMemberExpression( methodCall.Arguments[1] ); if(!TryGetTable( me )) { return methodCall; } ContentResolverColumnMapping column = tableFinder.GetColumn( me.Member ); if(column != null && column.Columns != null) { StringBuilder builder = sortBuilder ?? (sortBuilder = new StringBuilder()); if(builder.Length > 0) { builder.Append( ", " ); } if(column.Columns.Length > 1) { throw new NotSupportedException(); } builder.Append( column.Columns[0] ); if(methodCall.Method.Name == "OrderByDescending") { builder.Append( " DESC" ); } return methodCall.Arguments[0]; } return methodCall; } private Expression VisitSelect( MethodCallExpression methodCall ) { MemberExpression me = FindMemberExpression( methodCall.Arguments[1] ); if(!TryGetTable( me )) { return methodCall; } ContentResolverColumnMapping column = tableFinder.GetColumn( me.Member ); if(column == null || column.Columns == null) { return methodCall; } (projections ?? (projections = new List<ContentResolverColumnMapping>())).Add( column ); if(column.ReturnType.IsValueType || column.ReturnType == typeof(String)) { ReturnType = column.ReturnType; } fallback = true; Type argType = GetExpressionArgumentType( methodCall.Arguments[0] ); if(ReturnType == null || (argType != null && ReturnType.IsAssignableFrom( argType ))) { return methodCall.Arguments[0]; } return Expression.Constant( Activator.CreateInstance( typeof(Query<>).MakeGenericType( ReturnType ), provider ) ); } // private Expression VisitSelect (MethodCallExpression methodCall) // { // List<MemberExpression> mes = MemberExpressionFinder.Find (methodCall.Arguments[1], this.tableFinder); // if (!TryGetTable (mes)) // return methodCall; // // Type returnType = null; // // List<Tuple<string, Type>> projs = new List<Tuple<string, Type>>(); // foreach (MemberExpression me in mes) // { // Tuple<string, Type> column = this.tableFinder.GetColumn (me.Member); // if (column == null) // return methodCall; // // if (returnType == null) // returnType = column.Item2; // if (returnType != column.Item2) // return methodCall; // // projs.Add (column); // } // // ReturnType = returnType; // this.fallback = true; // // (this.projections ?? 
(this.projections = new List<Tuple<string, Type>>())) // .AddRange (projs); // // return methodCall.Arguments[0]; // } private Expression VisitSelectMany( MethodCallExpression methodCall ) { List<MemberExpression> mes = MemberExpressionFinder.Find( methodCall, tableFinder ); if(mes.Count > 1) { fallback = true; return methodCall; } if(!TryGetTable( mes )) { return methodCall; } ContentResolverColumnMapping column = tableFinder.GetColumn( mes[0].Member ); if(column == null || column.ReturnType.GetGenericTypeDefinition() != typeof(IEnumerable<>)) { fallback = true; return methodCall; } ReturnType = column.ReturnType.GetGenericArguments()[0]; return Expression.Constant( Activator.CreateInstance( typeof(Query<>).MakeGenericType( ReturnType ), provider ) ); //return methodCall.Arguments[0]; } private Expression VisitSingle( MethodCallExpression methodCall ) { if(methodCall.Arguments.Count > 1) { VisitWhere( methodCall ); if(fallback) { return methodCall; } } Take = 2; return methodCall; } private Expression VisitSkip( MethodCallExpression methodCall ) { ConstantExpression ce = (ConstantExpression)methodCall.Arguments[1]; Skip = (Int32)ce.Value; return methodCall.Arguments[0]; } private Expression VisitTake( MethodCallExpression methodCall ) { ConstantExpression ce = (ConstantExpression)methodCall.Arguments[1]; Take = (Int32)ce.Value; return methodCall.Arguments[0]; } private Expression VisitWhere( MethodCallExpression methodCall ) { Expression expression = ExpressionEvaluator.Evaluate( methodCall ); var eval = new WhereEvaluator( tableFinder, Table ); expression = eval.Evaluate( expression ); if(eval.Fallback || eval.Table == null || (Table != null && eval.Table != Table)) { fallback = true; return methodCall; } if(Table == null) { Table = eval.Table; } arguments.AddRange( eval.Arguments ); if(queryBuilder.Length > 0) { queryBuilder.Append( " AND " ); } queryBuilder.Append( eval.QueryString ); return methodCall.Arguments[0]; } private class WhereEvaluator : ExpressionVisitor { private readonly List<String> arguments = new List<String>(); private readonly ITableFinder tableFinder; private StringBuilder builder = new StringBuilder(); private ContentResolverColumnMapping currentMap; private TableFindResult table; public WhereEvaluator( ITableFinder tableFinder, Uri existingTable ) { this.tableFinder = tableFinder; if(existingTable != null) { table = new TableFindResult( existingTable, null ); } } public List<String> Arguments { get { return arguments; } } public Boolean Fallback { get; private set; } public String QueryString { get { return builder.ToString(); } } public Uri Table { get { return table.Table; } } public Expression Evaluate( Expression expression ) { expression = Visit( expression ); if(!Fallback && table != null && table.MimeType != null) { builder.Insert( 0, String.Format( "(({0} = ?) 
AND ", ContactsContract.DataColumns.Mimetype ) ); builder.Append( ")" ); arguments.Insert( 0, table.MimeType ); } return expression; } protected override Expression VisitBinary( BinaryExpression binary ) { String current = builder.ToString(); builder = new StringBuilder(); Visit( binary.Left ); if(Fallback) { return binary; } String left = builder.ToString(); builder = new StringBuilder(); String joiner; switch(binary.NodeType) { case ExpressionType.AndAlso: joiner = " AND "; break; case ExpressionType.OrElse: joiner = " OR "; break; case ExpressionType.Equal: joiner = " = "; break; case ExpressionType.GreaterThan: joiner = " > "; break; case ExpressionType.LessThan: joiner = " < "; break; case ExpressionType.NotEqual: joiner = " IS NOT "; break; default: Fallback = true; return binary; } Visit( binary.Right ); if(Fallback) { if(binary.NodeType == ExpressionType.AndAlso) { Fallback = false; builder = new StringBuilder( current ); builder.Append( "(" ); builder.Append( left ); builder.Append( ")" ); return binary.Right; } else { return binary; } } String right = builder.ToString(); builder = new StringBuilder( current ); builder.Append( "(" ); builder.Append( left ); builder.Append( joiner ); builder.Append( right ); builder.Append( ")" ); return binary; } protected override Expression VisitConstant( ConstantExpression constant ) { if(constant.Value is IQueryable) { return constant; } if(constant.Value == null) { builder.Append( "NULL" ); } else { Object value = constant.Value; if(currentMap != null && currentMap.ValueToQueryable != null) { value = currentMap.ValueToQueryable( value ); } switch(Type.GetTypeCode( value.GetType() )) { case TypeCode.Object: Fallback = true; return constant; case TypeCode.Boolean: arguments.Add( (Boolean)value ? "1" : "0" ); builder.Append( "?" ); break; default: arguments.Add( value.ToString() ); builder.Append( "?" ); break; } } return base.VisitConstant( constant ); } protected override Expression VisitMember( MemberExpression memberExpression ) { TableFindResult result = tableFinder.Find( memberExpression ); if(table == null) { table = result; } else if(Table != result.Table || result.MimeType != table.MimeType) { Fallback = true; return memberExpression; } ContentResolverColumnMapping cmap = tableFinder.GetColumn( memberExpression.Member ); if(cmap == null || cmap.Columns == null) { Fallback = true; return memberExpression; } currentMap = cmap; if(cmap.Columns.Length == 1) { builder.Append( cmap.Columns[0] ); } else { throw new NotSupportedException(); } return base.VisitMember( memberExpression ); } } } }
using UnityEngine; using System.Collections; using System.Collections.Generic; using System; namespace UMA { /// <summary> /// Utility class for merging multiple skinned meshes. /// </summary> public static class SkinnedMeshCombiner { /// <summary> /// Container for source mesh data. /// </summary> public class CombineInstance { public UMAMeshData meshData; public int[] targetSubmeshIndices; public BitArray[] triangleMask; } private enum MeshComponents { none = 0, has_normals = 1, has_tangents = 2, has_colors32 = 4, has_uv = 8, has_uv2 = 16, has_uv3 = 32, has_uv4 = 64, has_blendShapes = 128, has_clothSkinning = 256, } static Dictionary<int, BoneIndexEntry> bonesCollection; static List<Matrix4x4> bindPoses; static List<int> bonesList; /// <summary> /// Combines a set of meshes into the target mesh. /// </summary> /// <param name="target">Target.</param> /// <param name="sources">Sources.</param> /// <param name="blendShapeSettings">BlendShape Settings.</param> public static void CombineMeshes(UMAMeshData target, CombineInstance[] sources, UMAData.BlendShapeSettings blendShapeSettings = null) { if (blendShapeSettings == null) blendShapeSettings = new UMAData.BlendShapeSettings(); int vertexCount = 0; int bindPoseCount = 0; int transformHierarchyCount = 0; int blendShapeCount = 0; MeshComponents meshComponents = MeshComponents.none; int subMeshCount = FindTargetSubMeshCount(sources); var subMeshTriangleLength = new int[subMeshCount]; AnalyzeSources(sources, subMeshTriangleLength, ref vertexCount, ref bindPoseCount, ref transformHierarchyCount, ref meshComponents, ref blendShapeCount); int[][] submeshTriangles = new int[subMeshCount][]; for (int i = 0; i < subMeshTriangleLength.Length; i++) { submeshTriangles[i] = target.GetSubmeshBuffer(subMeshTriangleLength[i], i); subMeshTriangleLength[i] = 0; } bool has_normals = (meshComponents & MeshComponents.has_normals) != MeshComponents.none; bool has_tangents = (meshComponents & MeshComponents.has_tangents) != MeshComponents.none; bool has_uv = (meshComponents & MeshComponents.has_uv) != MeshComponents.none; bool has_uv2 = (meshComponents & MeshComponents.has_uv2) != MeshComponents.none; bool has_uv3 = (meshComponents & MeshComponents.has_uv3) != MeshComponents.none; bool has_uv4 = (meshComponents & MeshComponents.has_uv4) != MeshComponents.none; bool has_colors32 = (meshComponents & MeshComponents.has_colors32) != MeshComponents.none; bool has_blendShapes = (meshComponents & MeshComponents.has_blendShapes) != MeshComponents.none; if (blendShapeSettings.ignoreBlendShapes) has_blendShapes = false; bool has_clothSkinning = (meshComponents & MeshComponents.has_clothSkinning) != MeshComponents.none; Vector3[] vertices = EnsureArrayLength(target.vertices, vertexCount); BoneWeight[] boneWeights = EnsureArrayLength(target.unityBoneWeights, vertexCount); Vector3[] normals = has_normals ? EnsureArrayLength(target.normals, vertexCount) : null; Vector4[] tangents = has_tangents ? EnsureArrayLength(target.tangents, vertexCount) : null; Vector2[] uv = has_uv ? EnsureArrayLength(target.uv, vertexCount) : null; Vector2[] uv2 = has_uv2 ? EnsureArrayLength(target.uv2, vertexCount) : null; Vector2[] uv3 = has_uv3 ? EnsureArrayLength(target.uv3, vertexCount) : null; Vector2[] uv4 = has_uv4 ? EnsureArrayLength(target.uv4, vertexCount) : null; Color32[] colors32 = has_colors32 ? EnsureArrayLength(target.colors32, vertexCount) : null; UMABlendShape[] blendShapes = has_blendShapes ? 
new UMABlendShape[blendShapeCount] : null; UMATransform[] umaTransforms = EnsureArrayLength(target.umaBones, transformHierarchyCount); ClothSkinningCoefficient[] clothSkinning = has_clothSkinning ? EnsureArrayLength(target.clothSkinning, vertexCount) : null; Dictionary<Vector3, int> clothVertices = has_clothSkinning ? new Dictionary<Vector3, int>(vertexCount) : null; Dictionary<Vector3, int> localClothVertices = has_clothSkinning ? new Dictionary<Vector3, int>(vertexCount) : null; int boneCount = 0; foreach (var source in sources) { MergeSortedTransforms(umaTransforms, ref boneCount, source.meshData.umaBones); } int vertexIndex = 0; if (bonesCollection == null) bonesCollection = new Dictionary<int, BoneIndexEntry>(boneCount); else bonesCollection.Clear(); if (bindPoses == null) bindPoses = new List<Matrix4x4>(bindPoseCount); else bindPoses.Clear(); if (bonesList == null) bonesList = new List<int>(boneCount); else bonesList.Clear(); int blendShapeIndex = 0; foreach (var source in sources) { int sourceVertexCount = source.meshData.vertices.Length; BuildBoneWeights(source.meshData.boneWeights, 0, boneWeights, vertexIndex, sourceVertexCount, source.meshData.boneNameHashes, source.meshData.bindPoses, bonesCollection, bindPoses, bonesList); Array.Copy(source.meshData.vertices, 0, vertices, vertexIndex, sourceVertexCount); if (has_normals) { if (source.meshData.normals != null && source.meshData.normals.Length > 0) { Array.Copy(source.meshData.normals, 0, normals, vertexIndex, sourceVertexCount); } else { FillArray(normals, vertexIndex, sourceVertexCount, Vector3.zero); } } if (has_tangents) { if (source.meshData.tangents != null && source.meshData.tangents.Length > 0) { Array.Copy(source.meshData.tangents, 0, tangents, vertexIndex, sourceVertexCount); } else { FillArray(tangents, vertexIndex, sourceVertexCount, Vector4.zero); } } if (has_uv) { if (source.meshData.uv != null && source.meshData.uv.Length >= sourceVertexCount) { Array.Copy(source.meshData.uv, 0, uv, vertexIndex, sourceVertexCount); } else { FillArray(uv, vertexIndex, sourceVertexCount, Vector4.zero); } } if (has_uv2) { if (source.meshData.uv2 != null && source.meshData.uv2.Length >= sourceVertexCount) { Array.Copy(source.meshData.uv2, 0, uv2, vertexIndex, sourceVertexCount); } else { FillArray(uv2, vertexIndex, sourceVertexCount, Vector4.zero); } } if (has_uv3) { if (source.meshData.uv3 != null && source.meshData.uv3.Length >= sourceVertexCount) { Array.Copy(source.meshData.uv3, 0, uv3, vertexIndex, sourceVertexCount); } else { FillArray(uv3, vertexIndex, sourceVertexCount, Vector4.zero); } } if (has_uv4) { if (source.meshData.uv4 != null && source.meshData.uv4.Length >= sourceVertexCount) { Array.Copy(source.meshData.uv4, 0, uv4, vertexIndex, sourceVertexCount); } else { FillArray(uv4, vertexIndex, sourceVertexCount, Vector4.zero); } } if (has_colors32) { if (source.meshData.colors32 != null && source.meshData.colors32.Length > 0) { Array.Copy(source.meshData.colors32, 0, colors32, vertexIndex, sourceVertexCount); } else { Color32 white32 = Color.white; FillArray(colors32, vertexIndex, sourceVertexCount, white32); } } if (has_blendShapes) { if (source.meshData.blendShapes != null && source.meshData.blendShapes.Length > 0) { for (int shapeIndex = 0; shapeIndex < source.meshData.blendShapes.Length; shapeIndex++) { #region BlendShape Baking if(blendShapeSettings.bakeBlendShapes != null && blendShapeSettings.bakeBlendShapes.Count > 0) { // If there are names in the bakeBlendShape dictionary and we find them in the meshData blendshape 
list, then lets bake them instead of adding them. UMABlendShape currentShape = source.meshData.blendShapes[shapeIndex]; if( blendShapeSettings.bakeBlendShapes.ContainsKey(currentShape.shapeName)) { float weight = blendShapeSettings.bakeBlendShapes[currentShape.shapeName] * 100.0f; if (weight <= 0f) continue; // Baking in nothing, so skip it entirely // Let's find the frame this weight is in int frameIndex; int prevIndex; for (frameIndex = 0; frameIndex < currentShape.frames.Length; frameIndex++) { if (currentShape.frames[frameIndex].frameWeight >= weight) break; } // Let's calculate the weight for the frame we're in float frameWeight = 1f; float prevWeight = 0f; bool doLerp = false; // Weight is higher than the last frame, shape is over 100% if (frameIndex >= currentShape.frames.Length) { frameIndex = currentShape.frames.Length - 1; frameWeight = (weight / currentShape.frames[frameIndex].frameWeight); } else if (frameIndex > 0) { doLerp = true; prevWeight = currentShape.frames[frameIndex - 1].frameWeight; frameWeight = ((weight - prevWeight) / (currentShape.frames[frameIndex].frameWeight - prevWeight)); prevWeight = 1f - frameWeight; } else { frameWeight = (weight / currentShape.frames[frameIndex].frameWeight); } prevIndex = frameIndex - 1; // The blend shape frames lerp between the deltas of two adjacent frames. int vertIndex = vertexIndex; for (int bakeIndex = 0; bakeIndex < currentShape.frames[frameIndex].deltaVertices.Length; bakeIndex++, vertIndex++) { // Add the current frame's deltas vertices[vertIndex] += currentShape.frames[frameIndex].deltaVertices[bakeIndex] * frameWeight; // Add in the previous frame's deltas if (doLerp) vertices[vertIndex] += currentShape.frames[prevIndex].deltaVertices[bakeIndex] * prevWeight; } if (has_normals) { vertIndex = vertexIndex; for (int bakeIndex = 0; bakeIndex < currentShape.frames[frameIndex].deltaNormals.Length; bakeIndex++, vertIndex++) { normals[vertIndex] += currentShape.frames[frameIndex].deltaNormals[bakeIndex] * frameWeight; if (doLerp) normals[vertIndex] += currentShape.frames[prevIndex].deltaNormals[bakeIndex] * prevWeight; } } if (has_tangents) { vertIndex = vertexIndex; for (int bakeIndex = 0; bakeIndex < currentShape.frames[frameIndex].deltaTangents.Length; bakeIndex++, vertIndex++) { tangents[vertIndex] += (Vector4)currentShape.frames[frameIndex].deltaTangents[bakeIndex] * frameWeight; if (doLerp) tangents[vertIndex] += (Vector4)currentShape.frames[prevIndex].deltaTangents[bakeIndex] * prevWeight; } } continue; // If we bake then don't perform the rest of this interation of the loop. 
} } #endregion bool nameAlreadyExists = false; int i = 0; //Probably this would be better with a dictionary for (i = 0; i < blendShapeIndex; i++) { if (blendShapes[i].shapeName == source.meshData.blendShapes[shapeIndex].shapeName) { nameAlreadyExists = true; break; } } if (nameAlreadyExists)//Lets add the vertices data to the existing blendShape { if (blendShapes[i].frames.Length != source.meshData.blendShapes[shapeIndex].frames.Length) { Debug.LogError("SkinnedMeshCombiner: mesh blendShape frame counts don't match!"); break; } for (int frameIndex = 0; frameIndex < source.meshData.blendShapes[shapeIndex].frames.Length; frameIndex++) { Array.Copy(source.meshData.blendShapes[shapeIndex].frames[frameIndex].deltaVertices, 0, blendShapes[i].frames[frameIndex].deltaVertices, vertexIndex, sourceVertexCount); Array.Copy(source.meshData.blendShapes[shapeIndex].frames[frameIndex].deltaNormals, 0, blendShapes[i].frames[frameIndex].deltaNormals, vertexIndex, sourceVertexCount); Array.Copy(source.meshData.blendShapes[shapeIndex].frames[frameIndex].deltaTangents, 0, blendShapes[i].frames[frameIndex].deltaTangents, vertexIndex, sourceVertexCount); } } else { blendShapes[blendShapeIndex] = new UMABlendShape(); blendShapes[blendShapeIndex].shapeName = source.meshData.blendShapes[shapeIndex].shapeName; blendShapes[blendShapeIndex].frames = new UMABlendFrame[source.meshData.blendShapes[shapeIndex].frames.Length]; for (int frameIndex = 0; frameIndex < source.meshData.blendShapes[shapeIndex].frames.Length; frameIndex++) { blendShapes[blendShapeIndex].frames[frameIndex] = new UMABlendFrame(vertexCount); blendShapes[blendShapeIndex].frames[frameIndex].frameWeight = source.meshData.blendShapes[shapeIndex].frames[frameIndex].frameWeight; Array.Copy(source.meshData.blendShapes[shapeIndex].frames[frameIndex].deltaVertices, 0, blendShapes[blendShapeIndex].frames[frameIndex].deltaVertices, vertexIndex, sourceVertexCount); Array.Copy(source.meshData.blendShapes[shapeIndex].frames[frameIndex].deltaNormals, 0, blendShapes[blendShapeIndex].frames[frameIndex].deltaNormals, vertexIndex, sourceVertexCount); Array.Copy(source.meshData.blendShapes[shapeIndex].frames[frameIndex].deltaTangents, 0, blendShapes[blendShapeIndex].frames[frameIndex].deltaTangents, vertexIndex, sourceVertexCount); } blendShapeIndex++; } } } } if (has_clothSkinning) { localClothVertices.Clear(); if (source.meshData.clothSkinningSerialized != null && source.meshData.clothSkinningSerialized.Length > 0) { for (int i = 0; i < source.meshData.vertexCount; i++) { var vertice = source.meshData.vertices[i]; if (!localClothVertices.ContainsKey(vertice)) { int localCount = localClothVertices.Count; localClothVertices.Add(vertice, localCount); if (!clothVertices.ContainsKey(vertice)) { ConvertData(ref source.meshData.clothSkinningSerialized[localCount], ref clothSkinning[clothVertices.Count]); clothVertices.Add(vertice, clothVertices.Count); } else { ConvertData(ref source.meshData.clothSkinningSerialized[localCount], ref clothSkinning[clothVertices[vertice]]); } } } } else { for (int i = 0; i < source.meshData.vertexCount; i++) { var vertice = source.meshData.vertices[i]; if (!clothVertices.ContainsKey(vertice)) { clothSkinning[clothVertices.Count].maxDistance = 0; clothSkinning[clothVertices.Count].collisionSphereDistance = float.MaxValue; clothVertices.Add(vertice, clothVertices.Count); localClothVertices.Add(vertice, clothVertices.Count); } } } } for (int i = 0; i < source.meshData.subMeshCount; i++) { if (source.targetSubmeshIndices[i] >= 0) { int[] 
subTriangles = source.meshData.submeshes[i].triangles; int triangleLength = subTriangles.Length; int destMesh = source.targetSubmeshIndices[i]; if (source.triangleMask == null) { CopyIntArrayAdd(subTriangles, 0, submeshTriangles[destMesh], subMeshTriangleLength[destMesh], triangleLength, vertexIndex); subMeshTriangleLength[destMesh] += triangleLength; } else { MaskedCopyIntArrayAdd(subTriangles, 0, submeshTriangles[destMesh], subMeshTriangleLength[destMesh], triangleLength, vertexIndex, source.triangleMask[i] ); subMeshTriangleLength[destMesh] += (triangleLength - (UMAUtils.GetCardinality(source.triangleMask[i])*3)); } } } vertexIndex += sourceVertexCount; } if (vertexCount != vertexIndex) { Debug.LogError("Combined vertices size didn't match precomputed value!"); } // fill in new values. target.vertexCount = vertexCount; target.vertices = vertices; target.unityBoneWeights = boneWeights; target.bindPoses = bindPoses.ToArray(); target.normals = normals; target.tangents = tangents; target.uv = uv; target.uv2 = uv2; target.uv3 = uv3; target.uv4 = uv4; target.colors32 = colors32; if (has_blendShapes) target.blendShapes = blendShapes; if (has_clothSkinning) { Array.Resize(ref clothSkinning, clothVertices.Count); } target.clothSkinning = clothSkinning; target.subMeshCount = subMeshCount; target.submeshes = new SubMeshTriangles[subMeshCount]; target.umaBones = umaTransforms; target.umaBoneCount = boneCount; for (int i = 0; i < subMeshCount; i++) { target.submeshes[i].triangles = submeshTriangles[i]; } target.boneNameHashes = bonesList.ToArray(); } public static UMAMeshData ShallowInstanceMesh(UMAMeshData source) { var target = new UMAMeshData(); target.bindPoses = source.bindPoses; target.boneNameHashes = source.boneNameHashes; target.unityBoneWeights = UMABoneWeight.Convert(source.boneWeights); target.colors32 = source.colors32; target.normals = source.normals; target.rootBoneHash = source.rootBoneHash; target.subMeshCount = source.subMeshCount; target.submeshes = source.submeshes; target.tangents = source.tangents; target.umaBoneCount = source.umaBoneCount; target.umaBones = source.umaBones; target.uv = source.uv; target.uv2 = source.uv2; target.uv3 = source.uv3; target.uv4 = source.uv4; target.vertexCount = source.vertexCount; target.vertices = source.vertices; target.blendShapes = source.blendShapes; if (source.clothSkinningSerialized != null && source.clothSkinningSerialized.Length != 0) { target.clothSkinning = new ClothSkinningCoefficient[source.clothSkinningSerialized.Length]; for (int i = 0; i < source.clothSkinningSerialized.Length; i++) { ConvertData(ref source.clothSkinningSerialized[i], ref target.clothSkinning[i]); } } else { target.clothSkinning = null; } return target; } public static void ConvertData(ref Vector2 source, ref ClothSkinningCoefficient dest) { dest.collisionSphereDistance = source.x; dest.maxDistance = source.y; } public static void ConvertData(ref ClothSkinningCoefficient source, ref Vector2 dest) { dest.x = source.collisionSphereDistance; dest.y = source.maxDistance; } private static void MergeSortedTransforms(UMATransform[] mergedTransforms, ref int len1, UMATransform[] umaTransforms) { int newBones = 0; int pos1 = 0; int pos2 = 0; int len2 = umaTransforms.Length; while(pos1 < len1 && pos2 < len2 ) { long i = ((long)mergedTransforms[pos1].hash) - ((long)umaTransforms[pos2].hash); if (i == 0) { pos1++; pos2++; } else if (i < 0) { pos1++; } else { pos2++; newBones++; } } newBones += len2 - pos2; pos1 = len1 - 1; pos2 = len2 - 1; len1 += newBones; int dest = len1-1; 
while (pos1 >= 0 && pos2 >= 0) { long i = ((long)mergedTransforms[pos1].hash) - ((long)umaTransforms[pos2].hash); if (i == 0) { mergedTransforms[dest] = mergedTransforms[pos1]; pos1--; pos2--; } else if (i > 0) { mergedTransforms[dest] = mergedTransforms[pos1]; pos1--; } else { mergedTransforms[dest] = umaTransforms[pos2]; pos2--; } dest--; } while (pos2 >= 0) { mergedTransforms[dest] = umaTransforms[pos2]; pos2--; dest--; } } private static void AnalyzeSources(CombineInstance[] sources, int[] subMeshTriangleLength, ref int vertexCount, ref int bindPoseCount, ref int transformHierarchyCount, ref MeshComponents meshComponents, ref int blendShapeCount) { HashSet<string> blendShapeNames = new HashSet<string> (); //Hash to find all the unique blendshape names for (int i = 0; i < subMeshTriangleLength.Length; i++) { subMeshTriangleLength[i] = 0; } foreach (var source in sources) { vertexCount += source.meshData.vertices.Length; bindPoseCount += source.meshData.bindPoses.Length; transformHierarchyCount += source.meshData.umaBones.Length; if (source.meshData.normals != null && source.meshData.normals.Length != 0) meshComponents |= MeshComponents.has_normals; if (source.meshData.tangents != null && source.meshData.tangents.Length != 0) meshComponents |= MeshComponents.has_tangents; if (source.meshData.uv != null && source.meshData.uv.Length != 0) meshComponents |= MeshComponents.has_uv; if (source.meshData.uv2 != null && source.meshData.uv2.Length != 0) meshComponents |= MeshComponents.has_uv2; if (source.meshData.uv3 != null && source.meshData.uv3.Length != 0) meshComponents |= MeshComponents.has_uv3; if (source.meshData.uv4 != null && source.meshData.uv4.Length != 0) meshComponents |= MeshComponents.has_uv4; if (source.meshData.colors32 != null && source.meshData.colors32.Length != 0) meshComponents |= MeshComponents.has_colors32; if (source.meshData.clothSkinningSerialized != null && source.meshData.clothSkinningSerialized.Length != 0) meshComponents |= MeshComponents.has_clothSkinning; //If we find a blendshape on this mesh then lets add it to the blendShapeNames hash to get all the unique names if (source.meshData.blendShapes != null && source.meshData.blendShapes.Length != 0) { for (int shapeIndex = 0; shapeIndex < source.meshData.blendShapes.Length; shapeIndex++) blendShapeNames.Add (source.meshData.blendShapes [shapeIndex].shapeName); } for (int i = 0; i < source.meshData.subMeshCount; i++) { if (source.targetSubmeshIndices[i] >= 0) { int triangleLength = (source.triangleMask == null) ? source.meshData.submeshes[i].triangles.Length : (source.meshData.submeshes[i].triangles.Length - (UMAUtils.GetCardinality(source.triangleMask[i]) * 3)); subMeshTriangleLength[source.targetSubmeshIndices[i]] += triangleLength; } } } //If our blendshape hash has at least 1 name, then we have a blendshape! 
if (blendShapeNames.Count > 0) { blendShapeCount = blendShapeNames.Count; meshComponents |= MeshComponents.has_blendShapes; } } private static int FindTargetSubMeshCount(CombineInstance[] sources) { int highestTargetIndex = -1; foreach (var source in sources) { foreach (var targetIndex in source.targetSubmeshIndices) { if (highestTargetIndex < targetIndex) { highestTargetIndex = targetIndex; } } } return highestTargetIndex + 1; } private static void BuildBoneWeights(UMABoneWeight[] source, int sourceIndex, BoneWeight[] dest, int destIndex, int count, int[] bones, Matrix4x4[] bindPoses, Dictionary<int, BoneIndexEntry> bonesCollection, List<Matrix4x4> bindPosesList, List<int> bonesList) { int[] boneMapping = new int[bones.Length]; for (int i = 0; i < boneMapping.Length; i++) { boneMapping[i] = TranslateBoneIndex(i, bones, bindPoses, bonesCollection, bindPosesList, bonesList); } while (count-- > 0) { TranslateBoneWeight(ref source[sourceIndex++], ref dest[destIndex++], boneMapping); } } private static void TranslateBoneWeight(ref UMABoneWeight source, ref BoneWeight dest, int[] boneMapping) { dest.weight0 = source.weight0; dest.weight1 = source.weight1; dest.weight2 = source.weight2; dest.weight3 = source.weight3; dest.boneIndex0 = boneMapping[source.boneIndex0]; dest.boneIndex1 = boneMapping[source.boneIndex1]; dest.boneIndex2 = boneMapping[source.boneIndex2]; dest.boneIndex3 = boneMapping[source.boneIndex3]; } private struct BoneIndexEntry { public int index; public List<int> indices; public int Count { get { return index >= 0 ? 1 : indices.Count; } } public int this[int idx] { get { if (index >= 0) { if (idx == 0) return index; throw new ArgumentOutOfRangeException(); } return indices[idx]; } } internal void AddIndex(int idx) { if (index >= 0) { indices = new List<int>(10); indices.Add(index); index = -1; } indices.Add(idx); } } private static bool CompareSkinningMatrices(Matrix4x4 m1, ref Matrix4x4 m2) { if (Mathf.Abs(m1.m00 - m2.m00) > 0.0001) return false; if (Mathf.Abs(m1.m01 - m2.m01) > 0.0001) return false; if (Mathf.Abs(m1.m02 - m2.m02) > 0.0001) return false; if (Mathf.Abs(m1.m03 - m2.m03) > 0.0001) return false; if (Mathf.Abs(m1.m10 - m2.m10) > 0.0001) return false; if (Mathf.Abs(m1.m11 - m2.m11) > 0.0001) return false; if (Mathf.Abs(m1.m12 - m2.m12) > 0.0001) return false; if (Mathf.Abs(m1.m13 - m2.m13) > 0.0001) return false; if (Mathf.Abs(m1.m20 - m2.m20) > 0.0001) return false; if (Mathf.Abs(m1.m21 - m2.m21) > 0.0001) return false; if (Mathf.Abs(m1.m22 - m2.m22) > 0.0001) return false; if (Mathf.Abs(m1.m23 - m2.m23) > 0.0001) return false; // These never change in a TRS Matrix4x4 // if (Mathf.Abs(m1.m30 - m2.m30) > 0.0001) return false; // if (Mathf.Abs(m1.m31 - m2.m31) > 0.0001) return false; // if (Mathf.Abs(m1.m32 - m2.m32) > 0.0001) return false; // if (Mathf.Abs(m1.m33 - m2.m33) > 0.0001) return false; return true; } private static int TranslateBoneIndex(int index, int[] bonesHashes, Matrix4x4[] bindPoses, Dictionary<int, BoneIndexEntry> bonesCollection, List<Matrix4x4> bindPosesList, List<int> bonesList) { var boneTransform = bonesHashes[index]; BoneIndexEntry entry; if (bonesCollection.TryGetValue(boneTransform, out entry)) { for (int i = 0; i < entry.Count; i++) { var res = entry[i]; if (CompareSkinningMatrices(bindPosesList[res], ref bindPoses[index])) { return res; } } var idx = bindPosesList.Count; entry.AddIndex(idx); bindPosesList.Add(bindPoses[index]); bonesList.Add(boneTransform); return idx; } else { var idx = bindPosesList.Count; 
bonesCollection.Add(boneTransform, new BoneIndexEntry() { index = idx }); bindPosesList.Add(bindPoses[index]); bonesList.Add(boneTransform); return idx; } } private static void CopyColorsToColors32(Color[] source, int sourceIndex, Color32[] dest, int destIndex, int count) { while (count-- > 0) { var sColor = source[sourceIndex++]; dest[destIndex++] = new Color32((byte)Mathf.RoundToInt(sColor.r * 255f), (byte)Mathf.RoundToInt(sColor.g * 255f), (byte)Mathf.RoundToInt(sColor.b * 255f), (byte)Mathf.RoundToInt(sColor.a * 255f)); } } private static void FillArray(Vector4[] array, int index, int count, Vector4 value) { while (count-- > 0) { array[index++] = value; } } private static void FillArray(Vector3[] array, int index, int count, Vector3 value) { while (count-- > 0) { array[index++] = value; } } private static void FillArray(Vector2[] array, int index, int count, Vector2 value) { while (count-- > 0) { array[index++] = value; } } private static void FillArray(Color[] array, int index, int count, Color value) { while (count-- > 0) { array[index++] = value; } } private static void FillArray(Color32[] array, int index, int count, Color32 value) { while (count-- > 0) { array[index++] = value; } } private static void CopyIntArrayAdd(int[] source, int sourceIndex, int[] dest, int destIndex, int count, int add) { for (int i = 0; i < count; i++) { dest[destIndex++] = source[sourceIndex++] + add; } } public static void MaskedCopyIntArrayAdd(int[] source, int sourceIndex, int[] dest, int destIndex, int count, int add, BitArray mask) { if ((mask.Count*3) != source.Length || (mask.Count*3) != count) { Debug.LogError("MaskedCopyIntArrayAdd: mask and source count do not match!"); return; } for (int i = 0; i < count; i+=3) { if (!mask[(i/3)]) { dest[destIndex++] = source[sourceIndex + i + 0] + add; dest[destIndex++] = source[sourceIndex + i + 1] + add; dest[destIndex++] = source[sourceIndex + i + 2] + add; } } } private static T[] EnsureArrayLength<T>(T[] oldArray, int newLength) { if (newLength <= 0) return null; if (oldArray != null && oldArray.Length >= newLength) return oldArray; // Debug.Log("EnsureArrayLength allocating array of " + newLength + " of type: " + typeof(T)); return new T[newLength]; } } }
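// Illustrative sketch (separate from the combiner above): the triangle-merge step copies each
// submesh's index buffer into the combined buffer, adding the source mesh's vertex offset to every
// index and skipping triangles whose bit is set in the mask - the same idea as CopyIntArrayAdd /
// MaskedCopyIntArrayAdd. The array contents below are made up purely for the example.
using System;
using System.Collections;

static class TriangleMergeSketch
{
    // Copy 'source' triangle indices into 'dest', adding 'vertexOffset' to each index and dropping
    // any triangle whose bit is set in 'mask' (one bit per triangle, i.e. per three indices).
    static int MaskedCopyAdd(int[] source, int[] dest, int destIndex, int vertexOffset, BitArray mask)
    {
        for (int i = 0; i < source.Length; i += 3)
        {
            if (mask != null && mask[i / 3])
                continue; // triangle masked out, skip all three of its indices
            dest[destIndex++] = source[i + 0] + vertexOffset;
            dest[destIndex++] = source[i + 1] + vertexOffset;
            dest[destIndex++] = source[i + 2] + vertexOffset;
        }
        return destIndex; // next free slot in the combined index buffer
    }

    static void Main()
    {
        // Two triangles from a source mesh whose vertices start at offset 100 in the combined mesh.
        int[] sourceTriangles = { 0, 1, 2, 2, 1, 3 };
        var mask = new BitArray(2);
        mask[1] = true; // drop the second triangle

        int[] combined = new int[6];
        int written = MaskedCopyAdd(sourceTriangles, combined, 0, 100, mask);
        // Prints: 100, 101, 102, 0, 0, 0  (written: 3)
        Console.WriteLine(string.Join(", ", combined) + "  (written: " + written + ")");
    }
}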
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Linq; namespace Microsoft.WindowsAzure.Management.Compute.Models { /// <summary> /// Parameters supplied to the Create Virtual Machine Image operation. /// </summary> public partial class VirtualMachineOSImageCreateParameters { private string _description; /// <summary> /// Optional. Specifies the description of the OS image. /// </summary> public string Description { get { return this._description; } set { this._description = value; } } private string _eula; /// <summary> /// Optional. Specifies the End User License Agreement that is /// associated with the image. The value for this element is a string, /// but it is recommended that the value be a URL that points to a /// EULA. /// </summary> public string Eula { get { return this._eula; } set { this._eula = value; } } private Uri _iconUri; /// <summary> /// Optional. Specifies the URI to the icon that is displayed for the /// image in the Management Portal. /// </summary> public Uri IconUri { get { return this._iconUri; } set { this._iconUri = value; } } private string _imageFamily; /// <summary> /// Optional. Specifies a value that can be used to group OS images. /// </summary> public string ImageFamily { get { return this._imageFamily; } set { this._imageFamily = value; } } private bool _isPremium; /// <summary> /// Required. Indicates if the image contains software or associated /// services that will incur charges above the core price for the /// virtual machine. /// </summary> public bool IsPremium { get { return this._isPremium; } set { this._isPremium = value; } } private string _label; /// <summary> /// Required. Specifies the friendly name of the image. /// </summary> public string Label { get { return this._label; } set { this._label = value; } } private string _language; /// <summary> /// Optional. Specifies the language of the image. The Language element /// is only available using version 2013-03-01 or higher. /// </summary> public string Language { get { return this._language; } set { this._language = value; } } private Uri _mediaLinkUri; /// <summary> /// Required. Specifies the location of the blob in Azure storage. The /// blob location must belong to a storage account in the subscription /// specified by the SubscriptionId value in the operation call. /// Example: http://example.blob.core.windows.net/disks/mydisk.vhd. /// </summary> public Uri MediaLinkUri { get { return this._mediaLinkUri; } set { this._mediaLinkUri = value; } } private string _name; /// <summary> /// Required. Specifies a name that Azure uses to identify the image /// when creating one or more virtual machines. 
/// </summary> public string Name { get { return this._name; } set { this._name = value; } } private string _operatingSystemType; /// <summary> /// Required. The operating system type of the OS image. Possible /// values are: Linux or Windows. /// </summary> public string OperatingSystemType { get { return this._operatingSystemType; } set { this._operatingSystemType = value; } } private Uri _privacyUri; /// <summary> /// Optional. Specifies the URI that points to a document that contains /// the privacy policy related to the OS image. /// </summary> public Uri PrivacyUri { get { return this._privacyUri; } set { this._privacyUri = value; } } private System.DateTime? _publishedDate; /// <summary> /// Optional. Specifies the date when the OS image was added to the /// image repository. /// </summary> public System.DateTime? PublishedDate { get { return this._publishedDate; } set { this._publishedDate = value; } } private string _recommendedVMSize; /// <summary> /// Optional. Specifies the size to use for the virtual machine that is /// created from the OS image. /// </summary> public string RecommendedVMSize { get { return this._recommendedVMSize; } set { this._recommendedVMSize = value; } } private bool _showInGui; /// <summary> /// Required. Specifies whether the image should appear in the image /// gallery. /// </summary> public bool ShowInGui { get { return this._showInGui; } set { this._showInGui = value; } } private Uri _smallIconUri; /// <summary> /// Optional. Specifies the URI to the small icon that is displayed /// when the image is presented in the Azure Management Portal. The /// SmallIconUri element is only available using version 2013-03-01 or /// higher. /// </summary> public Uri SmallIconUri { get { return this._smallIconUri; } set { this._smallIconUri = value; } } /// <summary> /// Initializes a new instance of the /// VirtualMachineOSImageCreateParameters class. /// </summary> public VirtualMachineOSImageCreateParameters() { } } }
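// Illustrative sketch: populating the create parameters above before handing them to the Create
// Virtual Machine OS Image operation. The image name, label, and blob URL are placeholders (the URL
// is the example from the MediaLinkUri documentation); how the parameters are submitted to the
// service (e.g. through a management client) is outside the scope of this snippet.
using System;
using Microsoft.WindowsAzure.Management.Compute.Models;

static class OSImageCreateSketch
{
    static VirtualMachineOSImageCreateParameters BuildParameters()
    {
        return new VirtualMachineOSImageCreateParameters
        {
            // Required fields per the property documentation above.
            Name = "my-os-image",                // placeholder image name
            Label = "My OS image",
            OperatingSystemType = "Linux",       // "Linux" or "Windows"
            MediaLinkUri = new Uri("http://example.blob.core.windows.net/disks/mydisk.vhd"),
            IsPremium = false,
            ShowInGui = true,

            // Optional metadata.
            Description = "Example image registered from an uploaded VHD.",
            ImageFamily = "ExampleFamily",
            RecommendedVMSize = "Small"
        };
    }
}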
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Security; using System.Diagnostics; using System.Runtime.ExceptionServices; using System.Runtime.CompilerServices; namespace System.Threading.Tasks { // Task type used to implement: Task ContinueWith(Action<Task,...>) internal sealed class ContinuationTaskFromTask : Task { private Task m_antecedent; public ContinuationTaskFromTask( Task antecedent, Delegate action, object state, TaskCreationOptions creationOptions, InternalTaskOptions internalOptions) : base(action, state, Task.InternalCurrentIfAttached(creationOptions), default, creationOptions, internalOptions, null) { Debug.Assert(action is Action<Task> || action is Action<Task, object>, "Invalid delegate type in ContinuationTaskFromTask"); m_antecedent = antecedent; } /// <summary> /// Evaluates the value selector of the Task which is passed in as an object and stores the result. /// </summary> internal override void InnerInvoke() { // Get and null out the antecedent. This is crucial to avoid a memory // leak with long chains of continuations. var antecedent = m_antecedent; Debug.Assert(antecedent != null, "No antecedent was set for the ContinuationTaskFromTask."); m_antecedent = null; // Notify the debugger we're completing an asynchronous wait on a task antecedent.NotifyDebuggerOfWaitCompletionIfNecessary(); // Invoke the delegate Debug.Assert(m_action != null); if (m_action is Action<Task> action) { action(antecedent); return; } if (m_action is Action<Task, object> actionWithState) { actionWithState(antecedent, m_stateObject); return; } Debug.Fail("Invalid m_action in ContinuationTaskFromTask"); } } // Task type used to implement: Task<TResult> ContinueWith(Func<Task,...>) internal sealed class ContinuationResultTaskFromTask<TResult> : Task<TResult> { private Task m_antecedent; public ContinuationResultTaskFromTask( Task antecedent, Delegate function, object state, TaskCreationOptions creationOptions, InternalTaskOptions internalOptions) : base(function, state, Task.InternalCurrentIfAttached(creationOptions), default, creationOptions, internalOptions, null) { Debug.Assert(function is Func<Task, TResult> || function is Func<Task, object, TResult>, "Invalid delegate type in ContinuationResultTaskFromTask"); m_antecedent = antecedent; } /// <summary> /// Evaluates the value selector of the Task which is passed in as an object and stores the result. /// </summary> internal override void InnerInvoke() { // Get and null out the antecedent. This is crucial to avoid a memory // leak with long chains of continuations. 
var antecedent = m_antecedent; Debug.Assert(antecedent != null, "No antecedent was set for the ContinuationResultTaskFromTask."); m_antecedent = null; // Notify the debugger we're completing an asynchronous wait on a task antecedent.NotifyDebuggerOfWaitCompletionIfNecessary(); // Invoke the delegate Debug.Assert(m_action != null); if (m_action is Func<Task, TResult> func) { m_result = func(antecedent); return; } if (m_action is Func<Task, object, TResult> funcWithState) { m_result = funcWithState(antecedent, m_stateObject); return; } Debug.Fail("Invalid m_action in ContinuationResultTaskFromTask"); } } // Task type used to implement: Task ContinueWith(Action<Task<TAntecedentResult>,...>) internal sealed class ContinuationTaskFromResultTask<TAntecedentResult> : Task { private Task<TAntecedentResult> m_antecedent; public ContinuationTaskFromResultTask( Task<TAntecedentResult> antecedent, Delegate action, object state, TaskCreationOptions creationOptions, InternalTaskOptions internalOptions) : base(action, state, Task.InternalCurrentIfAttached(creationOptions), default, creationOptions, internalOptions, null) { Debug.Assert(action is Action<Task<TAntecedentResult>> || action is Action<Task<TAntecedentResult>, object>, "Invalid delegate type in ContinuationTaskFromResultTask"); m_antecedent = antecedent; } /// <summary> /// Evaluates the value selector of the Task which is passed in as an object and stores the result. /// </summary> internal override void InnerInvoke() { // Get and null out the antecedent. This is crucial to avoid a memory // leak with long chains of continuations. var antecedent = m_antecedent; Debug.Assert(antecedent != null, "No antecedent was set for the ContinuationTaskFromResultTask."); m_antecedent = null; // Notify the debugger we're completing an asynchronous wait on a task antecedent.NotifyDebuggerOfWaitCompletionIfNecessary(); // Invoke the delegate Debug.Assert(m_action != null); if (m_action is Action<Task<TAntecedentResult>> action) { action(antecedent); return; } if (m_action is Action<Task<TAntecedentResult>, object> actionWithState) { actionWithState(antecedent, m_stateObject); return; } Debug.Fail("Invalid m_action in ContinuationTaskFromResultTask"); } } // Task type used to implement: Task<TResult> ContinueWith(Func<Task<TAntecedentResult>,...>) internal sealed class ContinuationResultTaskFromResultTask<TAntecedentResult, TResult> : Task<TResult> { private Task<TAntecedentResult> m_antecedent; public ContinuationResultTaskFromResultTask( Task<TAntecedentResult> antecedent, Delegate function, object state, TaskCreationOptions creationOptions, InternalTaskOptions internalOptions) : base(function, state, Task.InternalCurrentIfAttached(creationOptions), default, creationOptions, internalOptions, null) { Debug.Assert(function is Func<Task<TAntecedentResult>, TResult> || function is Func<Task<TAntecedentResult>, object, TResult>, "Invalid delegate type in ContinuationResultTaskFromResultTask"); m_antecedent = antecedent; } /// <summary> /// Evaluates the value selector of the Task which is passed in as an object and stores the result. /// </summary> internal override void InnerInvoke() { // Get and null out the antecedent. This is crucial to avoid a memory // leak with long chains of continuations. 
var antecedent = m_antecedent; Debug.Assert(antecedent != null, "No antecedent was set for the ContinuationResultTaskFromResultTask."); m_antecedent = null; // Notify the debugger we're completing an asynchronous wait on a task antecedent.NotifyDebuggerOfWaitCompletionIfNecessary(); // Invoke the delegate Debug.Assert(m_action != null); if (m_action is Func<Task<TAntecedentResult>, TResult> func) { m_result = func(antecedent); return; } if (m_action is Func<Task<TAntecedentResult>, object, TResult> funcWithState) { m_result = funcWithState(antecedent, m_stateObject); return; } Debug.Fail("Invalid m_action in ContinuationResultTaskFromResultTask"); } } // For performance reasons, we don't just have a single way of representing // a continuation object. Rather, we have a hierarchy of types: // - TaskContinuation: abstract base that provides a virtual Run method // - StandardTaskContinuation: wraps a task,options,and scheduler, and overrides Run to process the task with that configuration // - AwaitTaskContinuation: base for continuations created through TaskAwaiter; targets default scheduler by default // - TaskSchedulerAwaitTaskContinuation: awaiting with a non-default TaskScheduler // - SynchronizationContextAwaitTaskContinuation: awaiting with a "current" sync ctx /// <summary>Represents a continuation.</summary> internal abstract class TaskContinuation { /// <summary>Inlines or schedules the continuation.</summary> /// <param name="completedTask">The antecedent task that has completed.</param> /// <param name="canInlineContinuationTask">true if inlining is permitted; otherwise, false.</param> internal abstract void Run(Task completedTask, bool canInlineContinuationTask); /// <summary>Tries to run the task on the current thread, if possible; otherwise, schedules it.</summary> /// <param name="task">The task to run</param> /// <param name="needsProtection"> /// true if we need to protect against multiple threads racing to start/cancel the task; otherwise, false. /// </param> protected static void InlineIfPossibleOrElseQueue(Task task, bool needsProtection) { Debug.Assert(task != null); Debug.Assert(task.m_taskScheduler != null); // Set the TASK_STATE_STARTED flag. This only needs to be done // if the task may be canceled or if someone else has a reference to it // that may try to execute it. if (needsProtection) { if (!task.MarkStarted()) return; // task has been previously started or canceled. Stop processing. } else { task.m_stateFlags |= Task.TASK_STATE_STARTED; } // Try to inline it but queue if we can't try { if (!task.m_taskScheduler.TryRunInline(task, taskWasPreviouslyQueued: false)) { task.m_taskScheduler.InternalQueueTask(task); } } catch (Exception e) { // Either TryRunInline() or QueueTask() threw an exception. Record the exception, marking the task as Faulted. // However if it was a ThreadAbortException coming from TryRunInline we need to skip here, // because it would already have been handled in Task.Execute() TaskSchedulerException tse = new TaskSchedulerException(e); task.AddException(tse); task.Finish(false); // Don't re-throw. } } // // This helper routine is targeted by the debugger. 
// #if PROJECTN [DependencyReductionRoot] #endif internal abstract Delegate[] GetDelegateContinuationsForDebugger(); } /// <summary>Provides the standard implementation of a task continuation.</summary> internal class StandardTaskContinuation : TaskContinuation { /// <summary>The unstarted continuation task.</summary> internal readonly Task m_task; /// <summary>The options to use with the continuation task.</summary> internal readonly TaskContinuationOptions m_options; /// <summary>The task scheduler with which to run the continuation task.</summary> private readonly TaskScheduler m_taskScheduler; /// <summary>Initializes a new continuation.</summary> /// <param name="task">The task to be activated.</param> /// <param name="options">The continuation options.</param> /// <param name="scheduler">The scheduler to use for the continuation.</param> internal StandardTaskContinuation(Task task, TaskContinuationOptions options, TaskScheduler scheduler) { Debug.Assert(task != null, "TaskContinuation ctor: task is null"); Debug.Assert(scheduler != null, "TaskContinuation ctor: scheduler is null"); m_task = task; m_options = options; m_taskScheduler = scheduler; if (AsyncCausalityTracer.LoggingOn) AsyncCausalityTracer.TraceOperationCreation(m_task, "Task.ContinueWith: " + task.m_action.Method.Name); if (Task.s_asyncDebuggingEnabled) Task.AddToActiveTasks(m_task); } /// <summary>Invokes the continuation for the target completion task.</summary> /// <param name="completedTask">The completed task.</param> /// <param name="canInlineContinuationTask">Whether the continuation can be inlined.</param> internal override void Run(Task completedTask, bool canInlineContinuationTask) { Debug.Assert(completedTask != null); Debug.Assert(completedTask.IsCompleted, "ContinuationTask.Run(): completedTask not completed"); // Check if the completion status of the task works with the desired // activation criteria of the TaskContinuationOptions. TaskContinuationOptions options = m_options; bool isRightKind = completedTask.IsCompletedSuccessfully ? (options & TaskContinuationOptions.NotOnRanToCompletion) == 0 : (completedTask.IsCanceled ? (options & TaskContinuationOptions.NotOnCanceled) == 0 : (options & TaskContinuationOptions.NotOnFaulted) == 0); // If the completion status is allowed, run the continuation. Task continuationTask = m_task; if (isRightKind) { // If the task was canceled before running (e.g. a ContinueWhenAll with a cancelled cancellation token) // we will still flow it to ScheduleAndStart() where it will check the status before running. // We check here to avoid faulty logs that contain a join event to an operation that was already set as completed. if (!continuationTask.IsCanceled && AsyncCausalityTracer.LoggingOn) { // Log now that we are sure that this continuation is being run AsyncCausalityTracer.TraceOperationRelation(continuationTask, CausalityRelation.AssignDelegate); } continuationTask.m_taskScheduler = m_taskScheduler; // Either run directly or just queue it up for execution, depending // on whether synchronous or asynchronous execution is wanted. 
if (canInlineContinuationTask && // inlining is allowed by the caller (options & TaskContinuationOptions.ExecuteSynchronously) != 0) // synchronous execution was requested by the continuation's creator { InlineIfPossibleOrElseQueue(continuationTask, needsProtection: true); } else { try { continuationTask.ScheduleAndStart(needsProtection: true); } catch (TaskSchedulerException) { // No further action is necessary -- ScheduleAndStart() already transitioned the // task to faulted. But we want to make sure that no exception is thrown from here. } } } // Otherwise, the final state of this task does not match the desired // continuation activation criteria; cancel it to denote this. else continuationTask.InternalCancel(false); } internal override Delegate[] GetDelegateContinuationsForDebugger() { if (m_task.m_action == null) { return m_task.GetDelegateContinuationsForDebugger(); } return new Delegate[] { m_task.m_action }; } } /// <summary>Task continuation for awaiting with a current synchronization context.</summary> internal sealed class SynchronizationContextAwaitTaskContinuation : AwaitTaskContinuation { /// <summary>SendOrPostCallback delegate to invoke the action.</summary> private static readonly SendOrPostCallback s_postCallback = state => ((Action)state)(); // can't use InvokeAction as it's SecurityCritical /// <summary>Cached delegate for PostAction</summary> private static ContextCallback s_postActionCallback; /// <summary>The context with which to run the action.</summary> private readonly SynchronizationContext m_syncContext; /// <summary>Initializes the SynchronizationContextAwaitTaskContinuation.</summary> /// <param name="context">The synchronization context with which to invoke the action. Must not be null.</param> /// <param name="action">The action to invoke. Must not be null.</param> /// <param name="flowExecutionContext">Whether to capture and restore ExecutionContext.</param> internal SynchronizationContextAwaitTaskContinuation( SynchronizationContext context, Action action, bool flowExecutionContext) : base(action, flowExecutionContext) { Debug.Assert(context != null); m_syncContext = context; } /// <summary>Inlines or schedules the continuation.</summary> /// <param name="task">The antecedent task, which is ignored.</param> /// <param name="canInlineContinuationTask">true if inlining is permitted; otherwise, false.</param> internal sealed override void Run(Task task, bool canInlineContinuationTask) { // If we're allowed to inline, run the action on this thread. if (canInlineContinuationTask && m_syncContext == SynchronizationContext.Current) { RunCallback(GetInvokeActionCallback(), m_action, ref Task.t_currentTask); } // Otherwise, Post the action back to the SynchronizationContext. else { TplEventSource log = TplEventSource.Log; if (log.IsEnabled()) { m_continuationId = Task.NewId(); log.AwaitTaskContinuationScheduled((task.ExecutingTaskScheduler ?? TaskScheduler.Default).Id, task.Id, m_continuationId); } RunCallback(GetPostActionCallback(), this, ref Task.t_currentTask); } // Any exceptions will be handled by RunCallback. 
} /// <summary>Calls InvokeOrPostAction(false) on the supplied SynchronizationContextAwaitTaskContinuation.</summary> /// <param name="state">The SynchronizationContextAwaitTaskContinuation.</param> private static void PostAction(object state) { var c = (SynchronizationContextAwaitTaskContinuation)state; TplEventSource log = TplEventSource.Log; if (log.TasksSetActivityIds && c.m_continuationId != 0) { c.m_syncContext.Post(s_postCallback, GetActionLogDelegate(c.m_continuationId, c.m_action)); } else { c.m_syncContext.Post(s_postCallback, c.m_action); // s_postCallback is manually cached, as the compiler won't in a SecurityCritical method } } private static Action GetActionLogDelegate(int continuationId, Action action) { return () => { Guid savedActivityId; Guid activityId = TplEventSource.CreateGuidForTaskID(continuationId); System.Diagnostics.Tracing.EventSource.SetCurrentThreadActivityId(activityId, out savedActivityId); try { action(); } finally { System.Diagnostics.Tracing.EventSource.SetCurrentThreadActivityId(savedActivityId); } }; } /// <summary>Gets a cached delegate for the PostAction method.</summary> /// <returns> /// A delegate for PostAction, which expects a SynchronizationContextAwaitTaskContinuation /// to be passed as state. /// </returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] private static ContextCallback GetPostActionCallback() { ContextCallback callback = s_postActionCallback; if (callback == null) { s_postActionCallback = callback = PostAction; } // lazily initialize SecurityCritical delegate return callback; } } /// <summary>Task continuation for awaiting with a task scheduler.</summary> internal sealed class TaskSchedulerAwaitTaskContinuation : AwaitTaskContinuation { /// <summary>The scheduler on which to run the action.</summary> private readonly TaskScheduler m_scheduler; /// <summary>Initializes the TaskSchedulerAwaitTaskContinuation.</summary> /// <param name="scheduler">The task scheduler with which to invoke the action. Must not be null.</param> /// <param name="action">The action to invoke. Must not be null.</param> /// <param name="flowExecutionContext">Whether to capture and restore ExecutionContext.</param> internal TaskSchedulerAwaitTaskContinuation( TaskScheduler scheduler, Action action, bool flowExecutionContext) : base(action, flowExecutionContext) { Debug.Assert(scheduler != null); m_scheduler = scheduler; } /// <summary>Inlines or schedules the continuation.</summary> /// <param name="ignored">The antecedent task, which is ignored.</param> /// <param name="canInlineContinuationTask">true if inlining is permitted; otherwise, false.</param> internal sealed override void Run(Task ignored, bool canInlineContinuationTask) { // If we're targeting the default scheduler, we can use the faster path provided by the base class. if (m_scheduler == TaskScheduler.Default) { base.Run(ignored, canInlineContinuationTask); } else { // We permit inlining if the caller allows us to, and // either we're on a thread pool thread (in which case we're fine running arbitrary code) // or we're already on the target scheduler (in which case we'll just ask the scheduler // whether it's ok to run here). We include the IsThreadPoolThread check here, whereas // we don't in AwaitTaskContinuation.Run, since here it expands what's allowed as opposed // to in AwaitTaskContinuation.Run where it restricts what's allowed. 
bool inlineIfPossible = canInlineContinuationTask && (TaskScheduler.InternalCurrent == m_scheduler || Thread.CurrentThread.IsThreadPoolThread); // Create the continuation task task. If we're allowed to inline, try to do so. // The target scheduler may still deny us from executing on this thread, in which case this'll be queued. var task = CreateTask(state => { try { ((Action)state)(); } catch (Exception exception) { Task.ThrowAsync(exception, targetContext: null); } }, m_action, m_scheduler); if (inlineIfPossible) { InlineIfPossibleOrElseQueue(task, needsProtection: false); } else { // We need to run asynchronously, so just schedule the task. try { task.ScheduleAndStart(needsProtection: false); } catch (TaskSchedulerException) { } // No further action is necessary, as ScheduleAndStart already transitioned task to faulted } } } } /// <summary>Base task continuation class used for await continuations.</summary> internal class AwaitTaskContinuation : TaskContinuation, IThreadPoolWorkItem { /// <summary>The ExecutionContext with which to run the continuation.</summary> private readonly ExecutionContext m_capturedContext; /// <summary>The action to invoke.</summary> protected readonly Action m_action; protected int m_continuationId; /// <summary>Initializes the continuation.</summary> /// <param name="action">The action to invoke. Must not be null.</param> /// <param name="flowExecutionContext">Whether to capture and restore ExecutionContext.</param> internal AwaitTaskContinuation(Action action, bool flowExecutionContext) { Debug.Assert(action != null); m_action = action; if (flowExecutionContext) { m_capturedContext = ExecutionContext.Capture(); } } /// <summary>Creates a task to run the action with the specified state on the specified scheduler.</summary> /// <param name="action">The action to run. Must not be null.</param> /// <param name="state">The state to pass to the action. Must not be null.</param> /// <param name="scheduler">The scheduler to target.</param> /// <returns>The created task.</returns> protected Task CreateTask(Action<object> action, object state, TaskScheduler scheduler) { Debug.Assert(action != null); Debug.Assert(scheduler != null); return new Task( action, state, null, default, TaskCreationOptions.None, InternalTaskOptions.QueuedByRuntime, scheduler) { CapturedContext = m_capturedContext }; } /// <summary>Inlines or schedules the continuation onto the default scheduler.</summary> /// <param name="task">The antecedent task, which is ignored.</param> /// <param name="canInlineContinuationTask">true if inlining is permitted; otherwise, false.</param> internal override void Run(Task task, bool canInlineContinuationTask) { // For the base AwaitTaskContinuation, we allow inlining if our caller allows it // and if we're in a "valid location" for it. See the comments on // IsValidLocationForInlining for more about what's valid. For performance // reasons we would like to always inline, but we don't in some cases to avoid // running arbitrary amounts of work in suspected "bad locations", like UI threads. if (canInlineContinuationTask && IsValidLocationForInlining) { RunCallback(GetInvokeActionCallback(), m_action, ref Task.t_currentTask); // any exceptions from m_action will be handled by s_callbackRunAction } else { TplEventSource log = TplEventSource.Log; if (log.IsEnabled()) { m_continuationId = Task.NewId(); log.AwaitTaskContinuationScheduled((task.ExecutingTaskScheduler ?? 
TaskScheduler.Default).Id, task.Id, m_continuationId); } // We couldn't inline, so now we need to schedule it ThreadPool.UnsafeQueueUserWorkItemInternal(this, preferLocal: true); } } /// <summary> /// Gets whether the current thread is an appropriate location to inline a continuation's execution. /// </summary> /// <remarks> /// Returns whether SynchronizationContext is null and we're in the default scheduler. /// If the await had a SynchronizationContext/TaskScheduler where it began and the /// default/ConfigureAwait(true) was used, then we won't be on this path. If, however, /// ConfigureAwait(false) was used, or the SynchronizationContext and TaskScheduler were /// naturally null/Default, then we might end up here. If we do, we need to make sure /// that we don't execute continuations in a place that isn't set up to handle them, e.g. /// running arbitrary amounts of code on the UI thread. It would be "correct", but very /// expensive, to always run the continuations asynchronously, incurring lots of context /// switches and allocations and locks and the like. As such, we employ the heuristic /// that if the current thread has a non-null SynchronizationContext or a non-default /// scheduler, then we better not run arbitrary continuations here. /// </remarks> internal static bool IsValidLocationForInlining { get { // If there's a SynchronizationContext, we'll be conservative and say // this is a bad location to inline. var ctx = SynchronizationContext.Current; if (ctx != null && ctx.GetType() != typeof(SynchronizationContext)) return false; // Similarly, if there's a non-default TaskScheduler, we'll be conservative // and say this is a bad location to inline. var sched = TaskScheduler.InternalCurrent; return sched == null || sched == TaskScheduler.Default; } } void IThreadPoolWorkItem.Execute() { var log = TplEventSource.Log; ExecutionContext context = m_capturedContext; if (!log.IsEnabled() && context == null) { m_action(); return; } Guid savedActivityId = default; if (log.TasksSetActivityIds && m_continuationId != 0) { Guid activityId = TplEventSource.CreateGuidForTaskID(m_continuationId); System.Diagnostics.Tracing.EventSource.SetCurrentThreadActivityId(activityId, out savedActivityId); } try { // We're not inside of a task, so t_currentTask doesn't need to be specially maintained. // We're on a thread pool thread with no higher-level callers, so exceptions can just propagate. ExecutionContext.CheckThreadPoolAndContextsAreDefault(); // If there's no execution context or Default, just invoke the delegate as ThreadPool is on Default context. // We don't have to use ExecutionContext.Run for the Default context here as there is no extra processing after the delegate if (context == null || context.IsDefault) { m_action(); } // If there is an execution context, get the cached delegate and run the action under the context. 
else { ExecutionContext.RunForThreadPoolUnsafe(context, s_invokeAction, m_action); } // ThreadPoolWorkQueue.Dispatch handles notifications and reset context back to default } finally { if (log.TasksSetActivityIds && m_continuationId != 0) { System.Diagnostics.Tracing.EventSource.SetCurrentThreadActivityId(savedActivityId); } } } /// <summary>Cached delegate that invokes an Action passed as an object parameter.</summary> private readonly static ContextCallback s_invokeContextCallback = (state) => ((Action)state)(); private readonly static Action<Action> s_invokeAction = (action) => action(); [MethodImpl(MethodImplOptions.AggressiveInlining)] protected static ContextCallback GetInvokeActionCallback() => s_invokeContextCallback; /// <summary>Runs the callback synchronously with the provided state.</summary> /// <param name="callback">The callback to run.</param> /// <param name="state">The state to pass to the callback.</param> /// <param name="currentTask">A reference to Task.t_currentTask.</param> protected void RunCallback(ContextCallback callback, object state, ref Task currentTask) { Debug.Assert(callback != null); Debug.Assert(currentTask == Task.t_currentTask); // Pretend there's no current task, so that no task is seen as a parent // and TaskScheduler.Current does not reflect false information var prevCurrentTask = currentTask; try { if (prevCurrentTask != null) currentTask = null; ExecutionContext context = m_capturedContext; if (context == null) { // If there's no captured context, just run the callback directly. callback(state); } else { // Otherwise, use the captured context to do so. ExecutionContext.RunInternal(context, callback, state); } } catch (Exception exception) // we explicitly do not request handling of dangerous exceptions like AVs { Task.ThrowAsync(exception, targetContext: null); } finally { // Restore the current task information if (prevCurrentTask != null) currentTask = prevCurrentTask; } } /// <summary>Invokes or schedules the action to be executed.</summary> /// <param name="action">The action to invoke or queue.</param> /// <param name="allowInlining"> /// true to allow inlining, or false to force the action to run asynchronously. /// </param> /// <remarks> /// No ExecutionContext work is performed used. This method is only used in the /// case where a raw Action continuation delegate was stored into the Task, which /// only happens in Task.SetContinuationForAwait if execution context flow was disabled /// via using TaskAwaiter.UnsafeOnCompleted or a similar path. /// </remarks> internal static void RunOrScheduleAction(Action action, bool allowInlining) { ref Task currentTask = ref Task.t_currentTask; Task prevCurrentTask = currentTask; // If we're not allowed to run here, schedule the action if (!allowInlining || !IsValidLocationForInlining) { UnsafeScheduleAction(action, prevCurrentTask); return; } // Otherwise, run it, making sure that t_currentTask is null'd out appropriately during the execution try { if (prevCurrentTask != null) currentTask = null; action(); } catch (Exception exception) { Task.ThrowAsync(exception, targetContext: null); } finally { if (prevCurrentTask != null) currentTask = prevCurrentTask; } } /// <summary>Invokes or schedules the action to be executed.</summary> /// <param name="box">The <see cref="IAsyncStateMachineBox"/> that needs to be invoked or queued.</param> /// <param name="allowInlining"> /// true to allow inlining, or false to force the box's action to run asynchronously. 
/// </param>
        internal static void RunOrScheduleAction(IAsyncStateMachineBox box, bool allowInlining)
        {
            // Same logic as in the RunOrScheduleAction(Action, ...) overload, except invoking
            // box.MoveNext instead of action().

            ref Task currentTask = ref Task.t_currentTask;
            Task prevCurrentTask = currentTask;

            // If we're not allowed to run here, schedule the action
            if (!allowInlining || !IsValidLocationForInlining)
            {
                // If logging is disabled, we can simply queue the box itself as a custom work
                // item, and its work item execution will just invoke its MoveNext. However, if
                // logging is enabled, there is pre/post-work we need to do around logging to
                // match what's done for other continuations, and that requires flowing additional
                // information into the continuation, which we don't want to burden other cases of the
                // box with... so, in that case we just delegate to the AwaitTaskContinuation-based
                // path that already handles this, albeit at the expense of allocating the ATC
                // object, and potentially forcing the box's delegate into existence, when logging
                // is enabled.
                if (TplEventSource.Log.IsEnabled())
                {
                    UnsafeScheduleAction(box.MoveNextAction, prevCurrentTask);
                }
                else
                {
                    ThreadPool.UnsafeQueueUserWorkItemInternal(box, preferLocal: true);
                }
                return;
            }

            // Otherwise, run it, making sure that t_currentTask is null'd out appropriately during the execution
            try
            {
                if (prevCurrentTask != null) currentTask = null;
                box.MoveNext();
            }
            catch (Exception exception)
            {
                Task.ThrowAsync(exception, targetContext: null);
            }
            finally
            {
                if (prevCurrentTask != null) currentTask = prevCurrentTask;
            }
        }

        /// <summary>Schedules the action to be executed. No ExecutionContext work is performed.</summary>
        /// <param name="action">The action to invoke or queue.</param>
        /// <param name="task">The task scheduling the action.</param>
        internal static void UnsafeScheduleAction(Action action, Task task)
        {
            AwaitTaskContinuation atc = new AwaitTaskContinuation(action, flowExecutionContext: false);

            var log = TplEventSource.Log;
            if (log.IsEnabled() && task != null)
            {
                atc.m_continuationId = Task.NewId();
                log.AwaitTaskContinuationScheduled((task.ExecutingTaskScheduler ?? TaskScheduler.Default).Id, task.Id, atc.m_continuationId);
            }

            ThreadPool.UnsafeQueueUserWorkItemInternal(atc, preferLocal: true);
        }

        internal override Delegate[] GetDelegateContinuationsForDebugger()
        {
            Debug.Assert(m_action != null);
            return new Delegate[] { AsyncMethodBuilderCore.TryGetStateMachineForDebugger(m_action) };
        }
    }
}
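// Illustrative sketch only (not part of the runtime source above): the heuristic used by
// IsValidLocationForInlining can be approximated from user code with the public APIs
// SynchronizationContext.Current and TaskScheduler.Current/Default. The class and method
// names below are hypothetical.
using System;
using System.Threading;
using System.Threading.Tasks;

internal static class InliningHeuristicSketch
{
    // Returns true when the calling thread has no custom SynchronizationContext and is
    // using the default TaskScheduler, i.e. a place where running a continuation inline
    // is unlikely to block a UI thread or a custom scheduler.
    internal static bool LooksSafeToInline()
    {
        SynchronizationContext ctx = SynchronizationContext.Current;
        if (ctx != null && ctx.GetType() != typeof(SynchronizationContext))
            return false;

        return TaskScheduler.Current == TaskScheduler.Default;
    }

    // Runs the callback inline when it looks safe, otherwise queues it to the thread pool.
    internal static void RunOrQueue(Action callback)
    {
        if (LooksSafeToInline())
            callback();
        else
            ThreadPool.QueueUserWorkItem(_ => callback(), null);
    }
}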
using System; using System.Collections.Generic; using System.IO; using GLTF.JsonExtensions; using Newtonsoft.Json; using UnityEngine; namespace GLTF { /// <summary> /// The root object for a glTF asset. /// </summary> public class GLTFRoot : GLTFProperty { /// <summary> /// Names of glTF extensions used somewhere in this asset. /// </summary> public List<string> ExtensionsUsed; /// <summary> /// Names of glTF extensions required to properly load this asset. /// </summary> public List<string> ExtensionsRequired; /// <summary> /// An array of accessors. An accessor is a typed view into a bufferView. /// </summary> public List<Accessor> Accessors; /// <summary> /// An array of keyframe animations. /// </summary> public List<GLTFAnimation> Animations; /// <summary> /// Metadata about the glTF asset. /// </summary> public Asset Asset; /// <summary> /// An array of buffers. A buffer points to binary geometry, animation, or skins. /// </summary> public List<Buffer> Buffers; /// <summary> /// An array of bufferViews. /// A bufferView is a view into a buffer generally representing a subset of the buffer. /// </summary> public List<BufferView> BufferViews; /// <summary> /// An array of cameras. A camera defines a projection matrix. /// </summary> public List<GLTFCamera> Cameras; /// <summary> /// An array of images. An image defines data used to create a texture. /// </summary> public List<Image> Images; /// <summary> /// An array of materials. A material defines the appearance of a primitive. /// </summary> public List<Material> Materials; /// <summary> /// An array of meshes. A mesh is a set of primitives to be rendered. /// </summary> public List<Mesh> Meshes; /// <summary> /// An array of nodes. /// </summary> public List<Node> Nodes; /// <summary> /// An array of samplers. A sampler contains properties for texture filtering and wrapping modes. /// </summary> public List<Sampler> Samplers; /// <summary> /// The index of the default scene. /// </summary> public SceneId Scene; /// <summary> /// An array of scenes. /// </summary> public List<Scene> Scenes; /// <summary> /// An array of skins. A skin is defined by joints and matrices. /// </summary> public List<Skin> Skins; /// <summary> /// An array of textures. /// </summary> public List<Texture> Textures; /// <summary> /// Return the default scene. When scene is null, scene of index 0 will be returned. /// When scenes list is null or empty, returns null. 
/// </summary> public Scene GetDefaultScene() { if (Scene != null) { return Scene.Value; } if (Scenes.Count > 0) { return Scenes[0]; } return null; } public static GLTFRoot Deserialize(JsonReader reader) { var root = new GLTFRoot(); if (reader.Read() && reader.TokenType != JsonToken.StartObject) { throw new Exception("gltf json must be an object"); } while (reader.Read() && reader.TokenType == JsonToken.PropertyName) { var curProp = reader.Value.ToString(); switch (curProp) { case "extensionsUsed": root.ExtensionsUsed = reader.ReadStringList(); break; case "extensionsRequired": root.ExtensionsRequired = reader.ReadStringList(); break; case "accessors": root.Accessors = reader.ReadList(() => Accessor.Deserialize(root, reader)); break; case "animations": root.Animations = reader.ReadList(() => GLTFAnimation.Deserialize(root, reader)); break; case "asset": root.Asset = Asset.Deserialize(root, reader); break; case "buffers": root.Buffers = reader.ReadList(() => Buffer.Deserialize(root, reader)); break; case "bufferViews": root.BufferViews = reader.ReadList(() => BufferView.Deserialize(root, reader)); break; case "cameras": root.Cameras = reader.ReadList(() => GLTFCamera.Deserialize(root, reader)); break; case "images": root.Images = reader.ReadList(() => Image.Deserialize(root, reader)); break; case "materials": root.Materials = reader.ReadList(() => Material.Deserialize(root, reader)); break; case "meshes": root.Meshes = reader.ReadList(() => Mesh.Deserialize(root, reader)); break; case "nodes": root.Nodes = reader.ReadList(() => Node.Deserialize(root, reader)); break; case "samplers": root.Samplers = reader.ReadList(() => Sampler.Deserialize(root, reader)); break; case "scene": root.Scene = SceneId.Deserialize(root, reader); break; case "scenes": root.Scenes = reader.ReadList(() => GLTF.Scene.Deserialize(root, reader)); break; case "skins": root.Skins = reader.ReadList(() => Skin.Deserialize(root, reader)); break; case "textures": root.Textures = reader.ReadList(() => Texture.Deserialize(root, reader)); break; default: root.DefaultPropertyDeserializer(root, reader); break; } } return root; } public override void Serialize(JsonWriter writer) { writer.WriteStartObject(); if (ExtensionsUsed != null && ExtensionsUsed.Count > 0) { writer.WritePropertyName("extensionsUsed"); writer.WriteStartArray(); foreach (var extension in ExtensionsUsed) { writer.WriteValue(extension); } writer.WriteEndArray(); } if (ExtensionsRequired != null && ExtensionsRequired.Count > 0) { writer.WritePropertyName("extensionsRequired"); writer.WriteStartArray(); foreach (var extension in ExtensionsRequired) { writer.WriteValue(extension); } writer.WriteEndArray(); } if (Accessors != null && Accessors.Count > 0) { writer.WritePropertyName("accessors"); writer.WriteStartArray(); foreach (var accessor in Accessors) { accessor.Serialize(writer); } writer.WriteEndArray(); } if (Animations != null && Animations.Count > 0) { writer.WritePropertyName("animations"); writer.WriteStartArray(); foreach (var animation in Animations) { animation.Serialize(writer); } writer.WriteEndArray(); } writer.WritePropertyName("asset"); Asset.Serialize(writer); if (Buffers != null && Buffers.Count > 0) { writer.WritePropertyName("buffers"); writer.WriteStartArray(); foreach (var buffer in Buffers) { buffer.Serialize(writer); } writer.WriteEndArray(); } if (BufferViews != null && BufferViews.Count > 0) { writer.WritePropertyName("bufferViews"); writer.WriteStartArray(); foreach (var bufferView in BufferViews) { bufferView.Serialize(writer); } 
writer.WriteEndArray(); } if (Cameras != null && Cameras.Count > 0) { writer.WritePropertyName("cameras"); writer.WriteStartArray(); foreach (var camera in Cameras) { camera.Serialize(writer); } writer.WriteEndArray(); } if (Images != null && Images.Count > 0) { writer.WritePropertyName("images"); writer.WriteStartArray(); foreach (var image in Images) { image.Serialize(writer); } writer.WriteEndArray(); } if (Materials != null && Materials.Count > 0) { writer.WritePropertyName("materials"); writer.WriteStartArray(); foreach (var material in Materials) { material.Serialize(writer); } writer.WriteEndArray(); } if (Meshes != null && Meshes.Count > 0) { writer.WritePropertyName("meshes"); writer.WriteStartArray(); foreach (var mesh in Meshes) { mesh.Serialize(writer); } writer.WriteEndArray(); } if (Nodes != null && Nodes.Count > 0) { writer.WritePropertyName("nodes"); writer.WriteStartArray(); foreach (var node in Nodes) { node.Serialize(writer); } writer.WriteEndArray(); } if (Samplers != null && Samplers.Count > 0) { writer.WritePropertyName("samplers"); writer.WriteStartArray(); foreach (var sampler in Samplers) { sampler.Serialize(writer); } writer.WriteEndArray(); } if (Scene != null) { writer.WritePropertyName("scene"); Scene.Serialize(writer); } if (Scenes != null && Scenes.Count > 0) { writer.WritePropertyName("scenes"); writer.WriteStartArray(); foreach (var scene in Scenes) { scene.Serialize(writer); } writer.WriteEndArray(); } if (Skins != null && Skins.Count > 0) { writer.WritePropertyName("skins"); writer.WriteStartArray(); foreach (var skin in Skins) { skin.Serialize(writer); } writer.WriteEndArray(); } if (Textures != null && Textures.Count > 0) { writer.WritePropertyName("textures"); writer.WriteStartArray(); foreach (var texture in Textures) { texture.Serialize(writer); } writer.WriteEndArray(); } base.Serialize(writer); writer.WriteEndObject(); } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. #pragma warning disable 414 using System; public class A{ ////////////////////////////// // Instance Fields public int FldPubInst; private int FldPrivInst; protected int FldFamInst; //Translates to "family" internal int FldAsmInst; //Translates to "assembly" protected internal int FldFoaInst; //Translates to "famorassem" ////////////////////////////// // Static Fields public static int FldPubStat; private static int FldPrivStat; protected static int FldFamStat; //family internal static int FldAsmStat; //assembly protected internal static int FldFoaStat; //famorassem ////////////////////////////////////// // Instance fields for nested classes public Cls ClsPubInst = new Cls(); private Cls ClsPrivInst = new Cls(); protected Cls ClsFamInst = new Cls(); internal Cls ClsAsmInst = new Cls(); protected internal Cls ClsFoaInst = new Cls(); ///////////////////////////////////// // Static fields of nested classes public static Cls ClsPubStat = new Cls(); private static Cls ClsPrivStat = new Cls(); ////////////////////////////// // Instance Methods public int MethPubInst(){ Console.WriteLine("A::MethPubInst()"); return 100; } private int MethPrivInst(){ Console.WriteLine("A::MethPrivInst()"); return 100; } protected int MethFamInst(){ Console.WriteLine("A::MethFamInst()"); return 100; } internal int MethAsmInst(){ Console.WriteLine("A::MethAsmInst()"); return 100; } protected internal int MethFoaInst(){ Console.WriteLine("A::MethFoaInst()"); return 100; } ////////////////////////////// // Static Methods public static int MethPubStat(){ Console.WriteLine("A::MethPubStat()"); return 100; } private static int MethPrivStat(){ Console.WriteLine("A::MethPrivStat()"); return 100; } protected static int MethFamStat(){ Console.WriteLine("A::MethFamStat()"); return 100; } internal static int MethAsmStat(){ Console.WriteLine("A::MethAsmStat()"); return 100; } protected internal static int MethFoaStat(){ Console.WriteLine("A::MethFoaStat()"); return 100; } ////////////////////////////// // Virtual Instance Methods public virtual int MethPubVirt(){ Console.WriteLine("A::MethPubVirt()"); return 100; } //@csharp - Note that C# won't compile an illegal private virtual function //So there is no negative testing MethPrivVirt() here. 
protected virtual int MethFamVirt(){ Console.WriteLine("A::MethFamVirt()"); return 100; } internal virtual int MethAsmVirt(){ Console.WriteLine("A::MethAsmVirt()"); return 100; } protected internal virtual int MethFoaVirt(){ Console.WriteLine("A::MethFoaVirt()"); return 100; } public class Cls{ ////////////////////////////// // Instance Fields public int NestFldPubInst; private int NestFldPrivInst; protected int NestFldFamInst; //Translates to "family" internal int NestFldAsmInst; //Translates to "assembly" protected internal int NestFldFoaInst; //Translates to "famorassem" ////////////////////////////// // Static Fields public static int NestFldPubStat; private static int NestFldPrivStat; protected static int NestFldFamStat; //family internal static int NestFldAsmStat; //assembly protected internal static int NestFldFoaStat; //famorassem ////////////////////////////////////// // Instance fields for nested classes public Cls2 Cls2PubInst = new Cls2(); private Cls2 Cls2PrivInst = new Cls2(); protected Cls2 Cls2FamInst = new Cls2(); internal Cls2 Cls2AsmInst = new Cls2(); protected internal Cls2 Cls2FoaInst = new Cls2(); ///////////////////////////////////// // Static fields of nested classes public static Cls ClsPubStat = new Cls(); private static Cls ClsPrivStat = new Cls(); ////////////////////////////// // Instance NestMethods public int NestMethPubInst(){ Console.WriteLine("A::NestMethPubInst()"); return 100; } private int NestMethPrivInst(){ Console.WriteLine("A::NestMethPrivInst()"); return 100; } protected int NestMethFamInst(){ Console.WriteLine("A::NestMethFamInst()"); return 100; } internal int NestMethAsmInst(){ Console.WriteLine("A::NestMethAsmInst()"); return 100; } protected internal int NestMethFoaInst(){ Console.WriteLine("A::NestMethFoaInst()"); return 100; } ////////////////////////////// // Static NestMethods public static int NestMethPubStat(){ Console.WriteLine("A::NestMethPubStat()"); return 100; } private static int NestMethPrivStat(){ Console.WriteLine("A::NestMethPrivStat()"); return 100; } protected static int NestMethFamStat(){ Console.WriteLine("A::NestMethFamStat()"); return 100; } internal static int NestMethAsmStat(){ Console.WriteLine("A::NestMethAsmStat()"); return 100; } protected internal static int NestMethFoaStat(){ Console.WriteLine("A::NestMethFoaStat()"); return 100; } ////////////////////////////// // Virtual Instance NestMethods public virtual int NestMethPubVirt(){ Console.WriteLine("A::NestMethPubVirt()"); return 100; } //@csharp - Note that C# won't compile an illegal private virtual function //So there is no negative testing NestMethPrivVirt() here. 
protected virtual int NestMethFamVirt(){ Console.WriteLine("A::NestMethFamVirt()"); return 100; } internal virtual int NestMethAsmVirt(){ Console.WriteLine("A::NestMethAsmVirt()"); return 100; } protected internal virtual int NestMethFoaVirt(){ Console.WriteLine("A::NestMethFoaVirt()"); return 100; } public class Cls2{ public int Test(){ int mi_RetCode = 100; ///////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////// // ACCESS ENCLOSING FIELDS/MEMBERS ///////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////// //@csharp - C# will not allow nested classes to access non-static members of their enclosing classes ///////////////////////////////// // Test static field access FldPubStat = 100; if(FldPubStat != 100) mi_RetCode = 0; FldFamStat = 100; if(FldFamStat != 100) mi_RetCode = 0; FldAsmStat = 100; if(FldAsmStat != 100) mi_RetCode = 0; FldFoaStat = 100; if(FldFoaStat != 100) mi_RetCode = 0; ///////////////////////////////// // Test static method access if(MethPubStat() != 100) mi_RetCode = 0; if(MethFamStat() != 100) mi_RetCode = 0; if(MethAsmStat() != 100) mi_RetCode = 0; if(MethFoaStat() != 100) mi_RetCode = 0; //////////////////////////////////////////// // Test access from within the nested class //@todo - Look at testing accessing one nested class from another, @bugug - NEED TO ADD SUCH TESTING, access the public nested class fields from here, etc... ///////////////////////////////// // Test static field access NestFldPubStat = 100; if(NestFldPubStat != 100) mi_RetCode = 0; NestFldFamStat = 100; if(NestFldFamStat != 100) mi_RetCode = 0; NestFldAsmStat = 100; if(NestFldAsmStat != 100) mi_RetCode = 0; NestFldFoaStat = 100; if(NestFldFoaStat != 100) mi_RetCode = 0; ///////////////////////////////// // Test static method access if(NestMethPubStat() != 100) mi_RetCode = 0; if(NestMethFamStat() != 100) mi_RetCode = 0; if(NestMethAsmStat() != 100) mi_RetCode = 0; if(NestMethFoaStat() != 100) mi_RetCode = 0; return mi_RetCode; } ////////////////////////////// // Instance Fields public int Nest2FldPubInst; private int Nest2FldPrivInst; protected int Nest2FldFamInst; //Translates to "family" internal int Nest2FldAsmInst; //Translates to "assembly" protected internal int Nest2FldFoaInst; //Translates to "famorassem" ////////////////////////////// // Static Fields public static int Nest2FldPubStat; private static int Nest2FldPrivStat; protected static int Nest2FldFamStat; //family internal static int Nest2FldAsmStat; //assembly protected internal static int Nest2FldFoaStat; //famorassem ////////////////////////////// // Instance Nest2Methods public int Nest2MethPubInst(){ Console.WriteLine("A::Nest2MethPubInst()"); return 100; } private int Nest2MethPrivInst(){ Console.WriteLine("A::Nest2MethPrivInst()"); return 100; } protected int Nest2MethFamInst(){ Console.WriteLine("A::Nest2MethFamInst()"); return 100; } internal int Nest2MethAsmInst(){ Console.WriteLine("A::Nest2MethAsmInst()"); return 100; } protected internal int Nest2MethFoaInst(){ Console.WriteLine("A::Nest2MethFoaInst()"); return 100; } ////////////////////////////// // Static Nest2Methods public static int Nest2MethPubStat(){ Console.WriteLine("A::Nest2MethPubStat()"); return 100; } private static int Nest2MethPrivStat(){ Console.WriteLine("A::Nest2MethPrivStat()"); return 100; } protected static int Nest2MethFamStat(){ 
Console.WriteLine("A::Nest2MethFamStat()"); return 100; } internal static int Nest2MethAsmStat(){ Console.WriteLine("A::Nest2MethAsmStat()"); return 100; } protected internal static int Nest2MethFoaStat(){ Console.WriteLine("A::Nest2MethFoaStat()"); return 100; } ////////////////////////////// // Virtual Instance Nest2Methods public virtual int Nest2MethPubVirt(){ Console.WriteLine("A::Nest2MethPubVirt()"); return 100; } //@csharp - Note that C# won't compile an illegal private virtual function //So there is no negative testing Nest2MethPrivVirt() here. protected virtual int Nest2MethFamVirt(){ Console.WriteLine("A::Nest2MethFamVirt()"); return 100; } internal virtual int Nest2MethAsmVirt(){ Console.WriteLine("A::Nest2MethAsmVirt()"); return 100; } protected internal virtual int Nest2MethFoaVirt(){ Console.WriteLine("A::Nest2MethFoaVirt()"); return 100; } } } }
//#define ASTAR_PROFILE

using Pathfinding;
using Pathfinding.Util;
using System.Collections.Generic;
using UnityEngine;

namespace Pathfinding {
	/** Contains useful functions for working with paths and nodes.
	 * This class works a lot with the Node class; a useful function to get nodes is AstarPath.GetNearest.
	 * \see AstarPath.GetNearest
	 * \see Pathfinding.Utils.GraphUpdateUtilities
	 * \since Added in version 3.2
	 * \ingroup utils
	 * */
	public static class PathUtilities {
		/** Returns whether there is a walkable path from \a n1 to \a n2.
		 * If you are making changes to the graph, areas must first be recalculated using FloodFill().
		 * \note This might return true for small areas even if there is no possible path if AstarPath.minAreaSize is greater than zero (0).
		 * So when using this, it is recommended to set AstarPath.minAreaSize to 0 (A* Inspector -> Settings -> Pathfinding).
		 * \see AstarPath.GetNearest */
		public static bool IsPathPossible (GraphNode n1, GraphNode n2) {
			return n1.Walkable && n2.Walkable && n1.Area == n2.Area;
		}

		/** Returns whether there are walkable paths between all nodes.
		 * If you are making changes to the graph, areas must first be recalculated using FloodFill().
		 * \note This might return true for small areas even if there is no possible path if AstarPath.minAreaSize is greater than zero (0).
		 * So when using this, it is recommended to set AstarPath.minAreaSize to 0 (A* Inspector -> Settings -> Pathfinding).
		 * \see AstarPath.GetNearest */
		public static bool IsPathPossible (List<GraphNode> nodes) {
			uint area = nodes[0].Area;
			for (int i=0;i<nodes.Count;i++) if (!nodes[i].Walkable || nodes[i].Area != area) return false;
			return true;
		}

		/** Returns all nodes reachable from the seed node.
		 * This function performs a BFS (breadth-first search) or flood fill of the graph and returns all nodes which can be reached from
		 * the seed node. In almost all cases this will be identical to returning all nodes which have the same area as the seed node.
		 * In the editor, areas are displayed as different colors of the nodes.
		 * The only case where it will not be so is when there is a one-way path from some part of the area to the seed node
		 * but no path from the seed node to that part of the graph.
		 *
		 * The returned list is sorted by node distance from the seed node,
		 * i.e. distance is measured in the number of nodes the shortest path from \a seed to that node would pass through.
		 * Note that the distance measurement does not take heuristics, penalties or tag penalties into account.
		 *
		 * Depending on the number of reachable nodes, this function can take quite some time to calculate
		 * so don't use it too often or it might affect the framerate of your game.
		 *
		 * \param seed The node to start the search from
		 * \param tagMask Optional mask for tags. This is a bitmask.
		 *
		 * \returns A List<Node> containing all nodes reachable from the seed node.
* For better memory management the returned list should be pooled, see Pathfinding.Util.ListPool */ public static List<GraphNode> GetReachableNodes (GraphNode seed, int tagMask = -1) { #if ASTAR_PROFILE System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch(); watch.Start (); #endif Stack<GraphNode> stack = Pathfinding.Util.StackPool<GraphNode>.Claim (); List<GraphNode> list = Pathfinding.Util.ListPool<GraphNode>.Claim (); HashSet<GraphNode> map = new HashSet<GraphNode>(); GraphNodeDelegate callback; if (tagMask == -1) { callback = delegate (GraphNode node) { if (node.Walkable && map.Add (node)) { list.Add (node); stack.Push (node); } }; } else { callback = delegate (GraphNode node) { if (node.Walkable && ((tagMask >> (int)node.Tag) & 0x1) != 0 && map.Add (node)) { list.Add (node); stack.Push (node); } }; } callback (seed); while (stack.Count > 0) { stack.Pop ().GetConnections (callback); } Pathfinding.Util.StackPool<GraphNode>.Release (stack); #if ASTAR_PROFILE watch.Stop (); Debug.Log ((1000*watch.Elapsed.TotalSeconds).ToString("0.0 ms")); #endif return list; } /** Returns all nodes within a given node-distance from the seed node. * This function performs a BFS (breadth-first-search) or flood fill of the graph and returns all nodes within a specified node distance which can be reached from * the seed node. In almost all cases when \a depth is large enough this will be identical to returning all nodes which have the same area as the seed node. * In the editor areas are displayed as different colors of the nodes. * The only case where it will not be so is when there is a one way path from some part of the area to the seed node * but no path from the seed node to that part of the graph. * * The returned list is sorted by node distance from the seed node * i.e distance is measured in the number of nodes the shortest path from \a seed to that node would pass through. * Note that the distance measurement does not take heuristics, penalties or tag penalties. * * Depending on the number of nodes, this function can take quite some time to calculate * so don't use it too often or it might affect the framerate of your game. * * \param seed The node to start the search from. * \param depth The maximum node-distance from the seed node. * \param tagMask Optional mask for tags. This is a bitmask. * * \returns A List<Node> containing all nodes reachable within a specified node distance from the seed node. 
* For better memory management the returned list should be pooled, see Pathfinding.Util.ListPool */ public static List<GraphNode> BFS (GraphNode seed, int depth, int tagMask = -1) { #if ASTAR_PROFILE System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch(); watch.Start (); #endif List<GraphNode> que = Pathfinding.Util.ListPool<GraphNode>.Claim (); List<GraphNode> list = Pathfinding.Util.ListPool<GraphNode>.Claim (); /** \todo Pool */ Dictionary<GraphNode,int> map = new Dictionary<GraphNode,int>(); int currentDist = 0; GraphNodeDelegate callback; if (tagMask == -1) { callback = delegate (GraphNode node) { if (node.Walkable && !map.ContainsKey (node)) { map.Add (node, currentDist+1); list.Add (node); que.Add (node); } }; } else { callback = delegate (GraphNode node) { if (node.Walkable && ((tagMask >> (int)node.Tag) & 0x1) != 0 && !map.ContainsKey (node)) { map.Add (node, currentDist+1); list.Add (node); que.Add (node); } }; } map[seed] = currentDist; callback (seed); while (que.Count > 0 && currentDist < depth ) { GraphNode n = que[que.Count-1]; currentDist = map[n]; que.RemoveAt ( que.Count-1 ); n.GetConnections (callback); } Pathfinding.Util.ListPool<GraphNode>.Release (que); #if ASTAR_PROFILE watch.Stop (); Debug.Log ((1000*watch.Elapsed.TotalSeconds).ToString("0.0 ms")); #endif return list; } /** Returns points in a spiral centered around the origin with a minimum clearance from other points. * The points are laid out on the involute of a circle * \see http://en.wikipedia.org/wiki/Involute * Which has some nice properties. * All points are separated by \a clearance world units. * This method is O(n), yes if you read the code you will see a binary search, but that binary search * has an upper bound on the number of steps, so it does not yield a log factor. * * \note Consider recycling the list after usage to reduce allocations. * \see Pathfinding.Util.ListPool */ public static List<Vector3> GetSpiralPoints (int count, float clearance) { List<Vector3> pts = Pathfinding.Util.ListPool<Vector3>.Claim(count); // The radius of the smaller circle used for generating the involute of a circle // Calculated from the separation distance between the turns float a = clearance/(2*Mathf.PI); float t = 0; pts.Add (InvoluteOfCircle(a, t)); for (int i=0;i<count;i++) { Vector3 prev = pts[pts.Count-1]; // d = -t0/2 + sqrt( t0^2/4 + 2d/a ) // Minimum angle (radians) which would create an arc distance greater than clearance float d = -t/2 + Mathf.Sqrt (t*t/4 + 2*clearance/a); // Binary search for separating this point and the previous one float mn = t + d; float mx = t + 2*d; while (mx - mn > 0.01f) { float mid = (mn + mx)/2; Vector3 p = InvoluteOfCircle (a, mid); if ((p - prev).sqrMagnitude < clearance*clearance) { mn = mid; } else { mx = mid; } } pts.Add ( InvoluteOfCircle (a, mx) ); t = mx; } return pts; } /** Returns the XZ coordinate of the involute of circle. * \see http://en.wikipedia.org/wiki/Involute */ private static Vector3 InvoluteOfCircle (float a, float t) { return new Vector3(a*(Mathf.Cos(t) + t*Mathf.Sin(t)), 0, a*(Mathf.Sin(t) - t*Mathf.Cos(t))); } /** Will calculate a number of points around \a p which are on the graph and are separated by \a clearance from each other. * This is like GetPointsAroundPoint except that \a previousPoints are treated as being in world space. * The average of the points will be found and then that will be treated as the group center. 
*/ public static void GetPointsAroundPointWorld (Vector3 p, IRaycastableGraph g, List<Vector3> previousPoints, float radius, float clearanceRadius) { if ( previousPoints.Count == 0 ) return; Vector3 avg = Vector3.zero; for ( int i = 0; i < previousPoints.Count; i++ ) avg += previousPoints[i]; avg /= previousPoints.Count; for ( int i = 0; i < previousPoints.Count; i++ ) previousPoints[i] -= avg; GetPointsAroundPoint ( p, g, previousPoints, radius, clearanceRadius ); } /** Will calculate a number of points around \a p which are on the graph and are separated by \a clearance from each other. * The maximum distance from \a p to any point will be \a radius. * Points will first be tried to be laid out as \a previousPoints and if that fails, random points will be selected. * This is great if you want to pick a number of target points for group movement. If you pass all current agent points from e.g the group's average position * this method will return target points so that the units move very little within the group, this is often aesthetically pleasing and reduces jitter if using * some kind of local avoidance. * * \param g The graph to use for linecasting. If you are only using one graph, you can get this by AstarPath.active.graphs[0] as IRaycastableGraph. * Note that not all graphs are raycastable, recast, navmesh and grid graphs are raycastable. On recast and navmesh it works the best. * \param previousPoints The points to use for reference. Note that these should not be in world space. They are treated as relative to \a p. */ public static void GetPointsAroundPoint (Vector3 p, IRaycastableGraph g, List<Vector3> previousPoints, float radius, float clearanceRadius) { if (g == null) throw new System.ArgumentNullException ("g"); NavGraph graph = g as NavGraph; if (graph == null) throw new System.ArgumentException ("g is not a NavGraph"); NNInfo nn = graph.GetNearestForce (p, NNConstraint.Default); p = nn.clampedPosition; if (nn.node == null) { // No valid point to start from return; } // Make sure the enclosing circle has a radius which can pack circles with packing density 0.5 radius = Mathf.Max (radius, 1.4142f*clearanceRadius*Mathf.Sqrt(previousPoints.Count));//Mathf.Sqrt(previousPoints.Count*clearanceRadius*2)); clearanceRadius *= clearanceRadius; for (int i=0;i<previousPoints.Count;i++) { Vector3 dir = previousPoints[i]; float magn = dir.magnitude; if (magn > 0) dir /= magn; float newMagn = radius;//magn > radius ? radius : magn; dir *= newMagn; bool worked = false; GraphHitInfo hit; int tests = 0; do { Vector3 pt = p + dir; if (g.Linecast (p, pt, nn.node, out hit)) { pt = hit.point; } for (float q = 0.1f; q <= 1.0f; q+= 0.05f) { Vector3 qt = (pt - p)*q + p; worked = true; for (int j=0;j<i;j++) { if ((previousPoints[j] - qt).sqrMagnitude < clearanceRadius) { worked = false; break; } } if (worked) { previousPoints[i] = qt; break; } } if (!worked) { // Abort after 5 tries if (tests > 8) { worked = true; } else { clearanceRadius *= 0.9f; // This will pick points in 2D closer to the edge of the circle with a higher probability dir = Random.onUnitSphere * Mathf.Lerp (newMagn, radius, tests / 5); dir.y = 0; tests++; } } } while (!worked); } } /** Returns randomly selected points on the specified nodes with each point being separated by \a clearanceRadius from each other. * Selecting points ON the nodes only works for TriangleMeshNode (used by Recast Graph and Navmesh Graph) and GridNode (used by GridGraph). * For other node types, only the positions of the nodes will be used. 
* * clearanceRadius will be reduced if no valid points can be found. */ public static List<Vector3> GetPointsOnNodes (List<GraphNode> nodes, int count, float clearanceRadius = 0) { if (nodes == null) throw new System.ArgumentNullException ("nodes"); if (nodes.Count == 0) throw new System.ArgumentException ("no nodes passed"); System.Random rnd = new System.Random(); List<Vector3> pts = Pathfinding.Util.ListPool<Vector3>.Claim(count); // Square clearanceRadius *= clearanceRadius; if (nodes[0] is TriangleMeshNode || nodes[0] is GridNode) { //Assume all nodes are triangle nodes or grid nodes List<float> accs = Pathfinding.Util.ListPool<float>.Claim(nodes.Count); float tot = 0; for (int i=0;i<nodes.Count;i++) { TriangleMeshNode tnode = nodes[i] as TriangleMeshNode; if (tnode != null) { float a = System.Math.Abs(Polygon.TriangleArea(tnode.GetVertex(0), tnode.GetVertex(1), tnode.GetVertex(2))); tot += a; accs.Add (tot); } #if !ASTAR_NO_GRID_GRAPH else { GridNode gnode = nodes[i] as GridNode; if (gnode != null) { GridGraph gg = GridNode.GetGridGraph (gnode.GraphIndex); float a = gg.nodeSize*gg.nodeSize; tot += a; accs.Add (tot); } else { accs.Add(tot); } } #endif } for (int i=0;i<count;i++) { //Pick point int testCount = 0; int testLimit = 10; bool worked = false; while (!worked) { worked = true; //If no valid points can be found, progressively lower the clearance radius until such a point is found if (testCount >= testLimit) { clearanceRadius *= 0.8f; testLimit += 10; if (testLimit > 100) clearanceRadius = 0; } float tg = (float)rnd.NextDouble()*tot; int v = accs.BinarySearch(tg); if (v < 0) v = ~v; if (v >= nodes.Count) { // This shouldn't happen, due to NextDouble being smaller than 1... but I don't trust floating point arithmetic. worked = false; continue; } TriangleMeshNode node = nodes[v] as TriangleMeshNode; Vector3 p; if (node != null) { // Find a random point inside the triangle float v1; float v2; do { v1 = (float)rnd.NextDouble(); v2 = (float)rnd.NextDouble(); } while (v1+v2 > 1); p = ((Vector3)(node.GetVertex(1)-node.GetVertex(0)))*v1 + ((Vector3)(node.GetVertex(2)-node.GetVertex(0)))*v2 + (Vector3)node.GetVertex(0); } else { #if !ASTAR_NO_GRID_GRAPH GridNode gnode = nodes[v] as GridNode; if (gnode != null) { GridGraph gg = GridNode.GetGridGraph (gnode.GraphIndex); float v1 = (float)rnd.NextDouble(); float v2 = (float)rnd.NextDouble(); p = (Vector3)gnode.position + new Vector3(v1 - 0.5f, 0, v2 - 0.5f) * gg.nodeSize; } else #endif { //Point nodes have no area, so we break directly instead pts.Add ((Vector3)nodes[v].position); break; } } // Test if it is some distance away from the other points if (clearanceRadius > 0) { for (int j=0;j<pts.Count;j++) { if ((pts[j]-p).sqrMagnitude < clearanceRadius) { worked = false; break; } } } if (worked) { pts.Add (p); break; } else { testCount++; } } } Pathfinding.Util.ListPool<float>.Release(accs); } else { for (int i=0;i<count;i++) { pts.Add ((Vector3)nodes[rnd.Next (nodes.Count)].position); } } return pts; } } }
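// Usage sketch for the helpers above (not part of PathUtilities): checking reachability
// between two nodes and collecting every node reachable from one of them. The GraphNode
// arguments are assumed to come from the caller, e.g. via AstarPath.GetNearest.
using System.Collections.Generic;
using Pathfinding;
using Pathfinding.Util;
using UnityEngine;

public static class PathUtilitiesExample
{
    public static void LogReachability(GraphNode from, GraphNode to)
    {
        if (!PathUtilities.IsPathPossible(from, to))
        {
            Debug.Log("No walkable path between the two nodes");
            return;
        }

        // A tagMask of -1 means "all tags"; the returned list comes from ListPool and
        // should be released again when no longer needed.
        List<GraphNode> reachable = PathUtilities.GetReachableNodes(from, -1);
        Debug.Log("Nodes reachable from 'from': " + reachable.Count);
        ListPool<GraphNode>.Release(reachable);
    }
}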
/* * Copyright (c) 2015, InWorldz Halcyon Developers * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of halcyon nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.Linq; using System.Text; using ProtoBuf; using OpenSim.Framework; using OpenMetaverse; namespace InWorldz.Region.Data.Thoosa.Serialization { /// <summary> /// Protobuf serializable PrimitiveBaseShape /// </summary> [ProtoContract] public class PrimShapeSnapshot { [ProtoMember(1)] public byte ProfileCurve; [ProtoMember(2)] public byte[] TextureEntry; [ProtoMember(3)] public byte[] ExtraParams; [ProtoMember(4)] public ushort PathBegin; [ProtoMember(5)] public byte PathCurve; [ProtoMember(6)] public ushort PathEnd; [ProtoMember(7)] public sbyte PathRadiusOffset; [ProtoMember(8)] public byte PathRevolutions; [ProtoMember(9)] public byte PathScaleX; [ProtoMember(10)] public byte PathScaleY; [ProtoMember(11)] public byte PathShearX; [ProtoMember(12)] public byte PathShearY; [ProtoMember(13)] public sbyte PathTwist; [ProtoMember(14)] public sbyte PathTwistBegin; [ProtoMember(15)] public byte PCode; [ProtoMember(16)] public ushort ProfileBegin; [ProtoMember(17)] public ushort ProfileEnd; [ProtoMember(18)] public ushort ProfileHollow; [ProtoMember(19)] public OpenMetaverse.Vector3 Scale; [ProtoMember(20)] public byte State; [ProtoMember(21)] public ProfileShape ProfileShape; [ProtoMember(22)] public HollowShape HollowShape; [ProtoMember(23)] public Guid SculptTexture; [ProtoMember(24)] public byte SculptType; [Obsolete("This attribute is no longer serialized")] [ProtoMember(25)] public byte[] SculptData { get { return null; } set { } } [ProtoMember(26)] public int FlexiSoftness; [ProtoMember(27)] public float FlexiTension; [ProtoMember(28)] public float FlexiDrag; [ProtoMember(29)] public float FlexiGravity; [ProtoMember(30)] public float FlexiWind; [ProtoMember(31)] public float FlexiForceX; [ProtoMember(32)] public float FlexiForceY; [ProtoMember(33)] public float FlexiForceZ; [ProtoMember(34)] public float[] LightColor; [ProtoMember(35)] public float LightRadius; [ProtoMember(36)] public float LightCutoff; [ProtoMember(37)] 
public float LightIntensity; [ProtoMember(38)] public bool FlexiEntry; [ProtoMember(39)] public bool LightEntry; [ProtoMember(40)] public bool SculptEntry; [ProtoMember(41)] public bool ProjectionEntry; [ProtoMember(42)] public Guid ProjectionTextureId; [ProtoMember(43)] public float ProjectionFOV; [ProtoMember(44)] public float ProjectionFocus; [ProtoMember(45)] public float ProjectionAmbiance; [ProtoMember(46)] public OpenMetaverse.PhysicsShapeType PreferredPhysicsShape; [ProtoMember(47)] public MediaEntrySnapshot[] MediaList; [ProtoMember(48)] public sbyte PathSkew; [ProtoMember(49)] public sbyte PathTaperX; [ProtoMember(50)] public sbyte PathTaperY; [ProtoMember(51)] public int VertexCount; [ProtoMember(52)] public int HighLODBytes; [ProtoMember(53)] public int MidLODBytes; [ProtoMember(54)] public int LowLODBytes; [ProtoMember(55)] public int LowestLODBytes; [ProtoMember(56)] public RenderMaterials RenderMaterials; static PrimShapeSnapshot() { ProtoBuf.Serializer.PrepareSerializer<PrimShapeSnapshot>(); } public static PrimShapeSnapshot FromShape(PrimitiveBaseShape primitiveBaseShape) { return new PrimShapeSnapshot { ExtraParams = primitiveBaseShape.ExtraParams, FlexiDrag = primitiveBaseShape.FlexiDrag, FlexiEntry = primitiveBaseShape.FlexiEntry, FlexiForceX = primitiveBaseShape.FlexiForceX, FlexiForceY = primitiveBaseShape.FlexiForceY, FlexiForceZ = primitiveBaseShape.FlexiForceZ, FlexiGravity = primitiveBaseShape.FlexiGravity, FlexiSoftness = primitiveBaseShape.FlexiSoftness, FlexiTension = primitiveBaseShape.FlexiTension, FlexiWind = primitiveBaseShape.FlexiWind, HollowShape = primitiveBaseShape.HollowShape, LightColor = new float[] { primitiveBaseShape.LightColorA, primitiveBaseShape.LightColorR, primitiveBaseShape.LightColorG, primitiveBaseShape.LightColorB }, LightCutoff = primitiveBaseShape.LightCutoff, LightEntry = primitiveBaseShape.LightEntry, LightIntensity = primitiveBaseShape.LightIntensity, LightRadius = primitiveBaseShape.LightRadius, PathBegin = primitiveBaseShape.PathBegin, PathCurve = primitiveBaseShape.PathCurve, PathEnd = primitiveBaseShape.PathEnd, PathRadiusOffset = primitiveBaseShape.PathRadiusOffset, PathRevolutions = primitiveBaseShape.PathRevolutions, PathScaleX = primitiveBaseShape.PathScaleX, PathScaleY = primitiveBaseShape.PathScaleY, PathShearX = primitiveBaseShape.PathShearX, PathShearY = primitiveBaseShape.PathShearY, PathSkew = primitiveBaseShape.PathSkew, PathTaperX = primitiveBaseShape.PathTaperX, PathTaperY = primitiveBaseShape.PathTaperY, PathTwist = primitiveBaseShape.PathTwist, PathTwistBegin = primitiveBaseShape.PathTwistBegin, PCode = primitiveBaseShape.PCode, PreferredPhysicsShape = primitiveBaseShape.PreferredPhysicsShape, ProfileBegin = primitiveBaseShape.ProfileBegin, ProfileCurve = primitiveBaseShape.ProfileCurve, ProfileEnd = primitiveBaseShape.ProfileEnd, ProfileHollow = primitiveBaseShape.ProfileHollow, ProfileShape = primitiveBaseShape.ProfileShape, ProjectionAmbiance = primitiveBaseShape.ProjectionAmbiance, ProjectionEntry = primitiveBaseShape.ProjectionEntry, ProjectionFocus = primitiveBaseShape.ProjectionFocus, ProjectionFOV = primitiveBaseShape.ProjectionFOV, ProjectionTextureId = primitiveBaseShape.ProjectionTextureUUID.Guid, Scale = primitiveBaseShape.Scale, SculptEntry = primitiveBaseShape.SculptEntry, SculptTexture = primitiveBaseShape.SculptTexture.Guid, SculptType = primitiveBaseShape.SculptType, State = primitiveBaseShape.State, TextureEntry = primitiveBaseShape.TextureEntryBytes, MediaList = 
MediaEntrySnapshot.SnapshotArrayFromList(primitiveBaseShape.Media), VertexCount = primitiveBaseShape.VertexCount, HighLODBytes = primitiveBaseShape.HighLODBytes, MidLODBytes = primitiveBaseShape.MidLODBytes, LowLODBytes = primitiveBaseShape.LowLODBytes, LowestLODBytes = primitiveBaseShape.LowestLODBytes, RenderMaterials = primitiveBaseShape.RenderMaterials }; } public PrimitiveBaseShape ToPrimitiveBaseShape() { return new PrimitiveBaseShape { ExtraParams = this.ExtraParams, FlexiDrag = this.FlexiDrag, FlexiEntry = this.FlexiEntry, FlexiForceX = this.FlexiForceX, FlexiForceY = this.FlexiForceY, FlexiForceZ = this.FlexiForceZ, FlexiGravity = this.FlexiGravity, FlexiSoftness = this.FlexiSoftness, FlexiTension = this.FlexiTension, FlexiWind = this.FlexiWind, HollowShape = this.HollowShape, LightColorA = this.LightColor[0], LightColorR = this.LightColor[1], LightColorG = this.LightColor[2], LightColorB = this.LightColor[3], LightCutoff = this.LightCutoff, LightEntry = this.LightEntry, LightIntensity = this.LightIntensity, LightRadius = this.LightRadius, PathBegin = this.PathBegin, PathCurve = this.PathCurve, PathEnd = this.PathEnd, PathRadiusOffset = this.PathRadiusOffset, PathRevolutions = this.PathRevolutions, PathScaleX = this.PathScaleX, PathScaleY = this.PathScaleY, PathShearX = this.PathShearX, PathShearY = this.PathShearY, PathSkew = this.PathSkew, PathTaperX = this.PathTaperX, PathTaperY = this.PathTaperY, PathTwist = this.PathTwist, PathTwistBegin = this.PathTwistBegin, PCode = this.PCode, PreferredPhysicsShape = this.PreferredPhysicsShape, ProfileBegin = this.ProfileBegin, ProfileCurve = this.ProfileCurve, ProfileEnd = this.ProfileEnd, ProfileHollow = this.ProfileHollow, ProfileShape = this.ProfileShape, ProjectionAmbiance = this.ProjectionAmbiance, ProjectionEntry = this.ProjectionEntry, ProjectionFocus = this.ProjectionFocus, ProjectionFOV = this.ProjectionFOV, ProjectionTextureUUID = new OpenMetaverse.UUID(this.ProjectionTextureId), Scale = this.Scale, SculptEntry = this.SculptEntry, SculptTexture = new OpenMetaverse.UUID(this.SculptTexture), SculptType = this.SculptType, State = this.State, TextureEntryBytes = this.TextureEntry, Media = MediaEntrySnapshot.SnapshotArrayToList(this.MediaList), VertexCount = this.VertexCount, HighLODBytes = this.HighLODBytes, MidLODBytes = this.MidLODBytes, LowLODBytes = this.LowLODBytes, LowestLODBytes = this.LowestLODBytes, RenderMaterials = this.RenderMaterials != null ? this.RenderMaterials : new RenderMaterials() }; } } }
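// Usage sketch (not part of the snapshot class): taking a snapshot of a shape, serializing
// it with protobuf-net, and restoring a PrimitiveBaseShape from the bytes. Assumes the
// standard ProtoBuf.Serializer API that the static constructor above already prepares.
using System.IO;
using OpenSim.Framework;
using ProtoBuf;

namespace InWorldz.Region.Data.Thoosa.Serialization
{
    public static class PrimShapeSnapshotExample
    {
        public static byte[] ToBytes(PrimitiveBaseShape shape)
        {
            PrimShapeSnapshot snapshot = PrimShapeSnapshot.FromShape(shape);
            using (var ms = new MemoryStream())
            {
                Serializer.Serialize(ms, snapshot);
                return ms.ToArray();
            }
        }

        public static PrimitiveBaseShape FromBytes(byte[] data)
        {
            using (var ms = new MemoryStream(data))
            {
                var snapshot = Serializer.Deserialize<PrimShapeSnapshot>(ms);
                return snapshot.ToPrimitiveBaseShape();
            }
        }
    }
}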
#region -- License Terms -- // // MessagePack for CLI // // Copyright (C) 2015-2016 FUJIWARA, Yusuke // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #endregion -- License Terms -- #if UNITY_5 || UNITY_STANDALONE || UNITY_WEBPLAYER || UNITY_WII || UNITY_IPHONE || UNITY_ANDROID || UNITY_PS3 || UNITY_XBOX360 || UNITY_FLASH || UNITY_BKACKBERRY || UNITY_WINRT #define UNITY #endif using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; using System.Linq; namespace MsgPack.Serialization { /// <summary> /// A provider parameter to support polymorphism. /// </summary> [DebuggerDisplay("{DebugString}")] public sealed partial class PolymorphismSchema { /// <summary> /// Gets the type of the serialization target. /// </summary> /// <value> /// The type of the serialization target. This value can be <c>null</c>. /// </value> internal Type TargetType { get; private set; } /// <summary> /// Gets the type of the polymorphism. /// </summary> /// <value> /// The type of the polymorphism. /// </value> internal PolymorphismType PolymorphismType { get; private set; } private readonly ReadOnlyDictionary<string, Type> _codeTypeMapping; /// <summary> /// Gets the code type mapping which maps between ext-type codes and .NET <see cref="Type"/>s. /// </summary> /// <value> /// The code type mapping which maps between ext-type codes and .NET <see cref="Type"/>s. /// </value> internal IDictionary<string, Type> CodeTypeMapping { get { return this._codeTypeMapping; } } internal bool UseDefault { get { return this.PolymorphismType == PolymorphismType.None; } } internal bool UseTypeEmbedding { get { return this.PolymorphismType == PolymorphismType.RuntimeType; } } internal Func<PolymorphicTypeVerificationContext, bool> TypeVerifier { get; private set; } internal PolymorphismSchemaChildrenType ChildrenType { get; private set; } private readonly ReadOnlyCollection<PolymorphismSchema> _children; /// <summary> /// Gets the schema for child items of the serialization target collection/tuple. /// </summary> /// <value> /// The schema for child items of the serialization target collection/tuple. /// </value> internal IList<PolymorphismSchema> ChildSchemaList { get { return this._children; } } /// <summary> /// Gets the schema for collection items of the serialization target collection. /// </summary> /// <value> /// The schema for collection items of the serialization target collection. 
/// </value> internal PolymorphismSchema ItemSchema { get { switch ( this.ChildrenType ) { case PolymorphismSchemaChildrenType.None: { return null; } case PolymorphismSchemaChildrenType.CollectionItems: { return this._children.FirstOrDefault(); } case PolymorphismSchemaChildrenType.DictionaryKeyValues: { return this._children.Skip( 1 ).FirstOrDefault(); } default: { throw new NotSupportedException(); } } } } private PolymorphismSchema TryGetItemSchema() { switch ( this.ChildrenType ) { case PolymorphismSchemaChildrenType.CollectionItems: { return this._children.FirstOrDefault(); } case PolymorphismSchemaChildrenType.DictionaryKeyValues: { return this._children.Skip( 1 ).FirstOrDefault(); } default: { return null; } } } /// <summary> /// Gets the schema for dictionary keys of the serialization target collection. /// </summary> /// <value> /// The schema for collection items of the serialization target collection. /// </value> internal PolymorphismSchema KeySchema { get { switch ( this.ChildrenType ) { case PolymorphismSchemaChildrenType.None: { return null; } case PolymorphismSchemaChildrenType.DictionaryKeyValues: { return this._children.FirstOrDefault(); } default: { throw new NotSupportedException(); } } } } private PolymorphismSchema TryGetKeySchema() { if ( this.ChildrenType == PolymorphismSchemaChildrenType.DictionaryKeyValues ) { return this._children.FirstOrDefault(); } else { return null; } } #if NET35 || NET40 || SILVERLIGHT || UNITY || CORE_CLR || NETSTANDARD1_1 private sealed class ReadOnlyDictionary<TKey, TValue> : IDictionary<TKey, TValue> { private readonly IDictionary<TKey, TValue> _underlying; ICollection<TKey> IDictionary<TKey, TValue>.Keys { get { return this._underlying.Keys; } } ICollection<TValue> IDictionary<TKey, TValue>.Values { get { return this._underlying.Values; } } TValue IDictionary<TKey, TValue>.this[ TKey key ] { get { return this._underlying[ key ]; } set { throw new NotSupportedException(); } } int ICollection<KeyValuePair<TKey, TValue>>.Count { get { return this._underlying.Count; } } bool ICollection<KeyValuePair<TKey, TValue>>.IsReadOnly { get { return true; } } public ReadOnlyDictionary( IDictionary<TKey, TValue> underlying ) { this._underlying = underlying; } bool IDictionary<TKey, TValue>.ContainsKey( TKey key ) { return this._underlying.ContainsKey( key ); } bool IDictionary<TKey, TValue>.TryGetValue( TKey key, out TValue value ) { return this._underlying.TryGetValue( key, out value ); } bool ICollection<KeyValuePair<TKey, TValue>>.Contains( KeyValuePair<TKey, TValue> item ) { return this._underlying.Contains( item ); } void ICollection<KeyValuePair<TKey, TValue>>.CopyTo( KeyValuePair<TKey, TValue>[] array, int arrayIndex ) { this._underlying.CopyTo( array, arrayIndex ); } IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator() { return this._underlying.GetEnumerator(); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return this._underlying.GetEnumerator(); } void IDictionary<TKey, TValue>.Add( TKey key, TValue value ) { throw new NotSupportedException(); } bool IDictionary<TKey, TValue>.Remove( TKey key ) { throw new NotSupportedException(); } void ICollection<KeyValuePair<TKey, TValue>>.Add( KeyValuePair<TKey, TValue> item ) { throw new NotSupportedException(); } void ICollection<KeyValuePair<TKey, TValue>>.Clear() { throw new NotSupportedException(); } bool ICollection<KeyValuePair<TKey, TValue>>.Remove( KeyValuePair<TKey, TValue> item ) { throw new NotSupportedException(); 
} } #endif // NET35 || NET40 || SILVERLIGHT || UNITY } }
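// Conceptual sketch only: CodeTypeMapping above is an ext-type-code -> System.Type map that
// is exposed read-only to callers. The snippet below mirrors that idea with plain BCL types;
// the type codes and mapped types are hypothetical examples, not the library's own API.
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;

internal static class CodeTypeMappingSketch
{
    internal static IDictionary<string, Type> BuildMapping()
    {
        var mapping = new Dictionary<string, Type>
        {
            { "1", typeof(string) },
            { "2", typeof(int[]) }
        };

        // Wrap the mutable dictionary so consumers can look up types but not modify the map,
        // the same guarantee the nested ReadOnlyDictionary shim provides on older frameworks.
        return new ReadOnlyDictionary<string, Type>(mapping);
    }
}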
/* Copyright (c) 2010 by Genstein This file is (or was originally) part of Trizbort, the Interactive Fiction Mapper. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ using System; using System.Collections.Generic; using System.Drawing; using System.Drawing.Drawing2D; using System.Windows.Forms; using System.Reflection; using System.IO; using PdfSharp.Drawing; namespace Trizbort { internal static class Drawing { static Drawing() { m_drawLineCursor = LoadCursor(Properties.Resources.DrawLineCursor); m_drawLineInvertedCursor = LoadCursor(Properties.Resources.DrawLineInvertedCursor); m_moveLineCursor = LoadCursor(Properties.Resources.MoveLineCursor); m_moveLineInvertedCursor = LoadCursor(Properties.Resources.MoveLineInvertedCursor); } private static Cursor LoadCursor(byte[] bytes) { using (var stream = new MemoryStream(bytes)) { return new Cursor(stream); } } public static Cursor DrawLineCursor { get { return Drawing.IsDark(Settings.Color[Colors.Canvas]) ? m_drawLineInvertedCursor : m_drawLineCursor; } } public static Cursor MoveLineCursor { get { return Drawing.IsDark(Settings.Color[Colors.Canvas]) ? 
m_moveLineInvertedCursor : m_moveLineCursor; } } public static Rectangle ToRectangle(RectangleF rect) { return new Rectangle((int)rect.X, (int)rect.Y, (int)rect.Width, (int)rect.Height); } public static void AddLine(XGraphicsPath path, LineSegment segment, Random random) { if (Settings.HandDrawn) { float dx = segment.End.X - segment.Start.X; float dy = segment.End.Y - segment.Start.Y; float distance = (float)Math.Sqrt(dx * dx + dy * dy); int points = random.Next(Math.Max(3, (int)(distance / 15)), Math.Max(6, (int)(distance / 8))); int lines = points - 1; Vector last = segment.Start; for (int line = 0; line < lines; ++line) { Vector next; if (line == 0) { next = last; } else if (line == lines - 1) { next = segment.End; } else { float fraction = (float)line / (float)(lines - 1); float x = segment.Start.X + (segment.End.X - segment.Start.X) * fraction; float y = segment.Start.Y + (segment.End.Y - segment.Start.Y) * fraction; x += random.Next(-1, 2); y += random.Next(-1, 2); next = new Vector(x, y); } path.AddLine(last.ToPointF(), next.ToPointF()); last = next; } } else { path.AddLine(segment.Start.ToPointF(), segment.End.ToPointF()); } } public static void DrawHandle(Canvas canvas, XGraphics graphics, Palette palette, Rect bounds, DrawingContext context, bool alwaysAlpha, bool round) { if (bounds.Width <= 0 || bounds.Height <= 0) { return; } using (var quality = new Smoothing(graphics, XSmoothingMode.Default)) { XBrush brush; Pen pen; var alpha = 180; if (context.Selected) { if (!alwaysAlpha) { alpha = 255; } brush = palette.Gradient(bounds, Color.FromArgb(alpha, Color.LemonChiffon), Color.FromArgb(alpha, Color.DarkOrange)); pen = palette.Pen(Color.FromArgb(alpha, Color.Chocolate), 0); } else { brush = palette.Gradient(bounds, Color.FromArgb(alpha, Color.LightCyan), Color.FromArgb(alpha, Color.SteelBlue)); pen = palette.Pen(Color.FromArgb(alpha, Color.Navy), 0); } if (round) { graphics.DrawEllipse(brush, bounds.ToRectangleF()); graphics.DrawEllipse(pen, bounds.ToRectangleF()); } else { graphics.DrawRectangle(brush, bounds.ToRectangleF()); graphics.DrawRectangle(pen, bounds.ToRectangleF()); } } } public static Color Mix(Color a, Color b, int propA, int propB) { return Color.FromArgb( (byte)(((a.R * propA) + (b.R * propB)) / (propA + propB)), (byte)(((a.G * propA) + (b.G * propB)) / (propA + propB)), (byte)(((a.B * propA) + (b.B * propB)) / (propA + propB))); } public static bool IsDark(Color color) { return Math.Max(color.R, Math.Max(color.G, color.B)) < 128; } public static PointF Subtract(PointF a, PointF b) { return new PointF(a.X - b.X, a.Y - b.Y); } public static PointF Divide(PointF pos, float scalar) { return new PointF(pos.X / scalar, pos.Y / scalar); } public static void DrawChevron(XGraphics graphics, PointF pos, float angle, float size, Brush fillBrush) { if (m_chevronPath == null) { var apex = new PointF(0.5f, 0); var leftCorner = new PointF(-0.5f, 0.5f); var rightCorner = new PointF(-0.5f, -0.5f); m_chevronPath = new XGraphicsPath(); m_chevronPath.AddLine(apex, rightCorner); m_chevronPath.AddLine(rightCorner, leftCorner); m_chevronPath.AddLine(leftCorner, apex); } var state = graphics.Save(); graphics.TranslateTransform(pos.X, pos.Y); graphics.RotateTransform(angle); graphics.ScaleTransform(size, size); graphics.DrawPath(fillBrush, m_chevronPath); graphics.Restore(state); } public static bool SetAlignmentFromCardinalOrOrdinalDirection(XStringFormat format, CompassPoint compassPoint) { switch (compassPoint) { case CompassPoint.North: case CompassPoint.NorthEast: 
format.LineAlignment = XLineAlignment.Far; format.Alignment = XStringAlignment.Near; break; case CompassPoint.East: case CompassPoint.SouthEast: case CompassPoint.South: format.LineAlignment = XLineAlignment.Near; format.Alignment = XStringAlignment.Near; break; case CompassPoint.West: case CompassPoint.SouthWest: format.LineAlignment = XLineAlignment.Near; format.Alignment = XStringAlignment.Far; break; case CompassPoint.NorthWest: format.LineAlignment = XLineAlignment.Far; format.Alignment = XStringAlignment.Far; break; default: return false; } return true; } public static string FontName(Font font) { if (!string.IsNullOrEmpty(font.OriginalFontName)) { return font.OriginalFontName; } return font.Name; } private static Cursor m_drawLineCursor; private static Cursor m_drawLineInvertedCursor; private static Cursor m_moveLineCursor; private static Cursor m_moveLineInvertedCursor; private static XGraphicsPath m_chevronPath; } }
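// --- Illustrative sketch, not part of Trizbort ---------------------------------------
// A minimal standalone example of the two colour helpers above: Mix() takes a weighted
// per-channel average, and IsDark() calls a colour "dark" when its brightest channel is
// below 128, which is what selects the inverted cursor variants. Class and method names
// here are illustrative only.
using System;
using System.Drawing;

internal static class ColorHelperSketch
{
    // Weighted per-channel average, mirroring Drawing.Mix(a, b, propA, propB) above.
    private static Color Mix(Color a, Color b, int propA, int propB)
    {
        return Color.FromArgb(
            (a.R * propA + b.R * propB) / (propA + propB),
            (a.G * propA + b.G * propB) / (propA + propB),
            (a.B * propA + b.B * propB) / (propA + propB));
    }

    // Same darkness rule as Drawing.IsDark above.
    private static bool IsDark(Color color)
        => Math.Max(color.R, Math.Max(color.G, color.B)) < 128;

    private static void Main()
    {
        var canvas = Color.FromArgb(30, 30, 30);
        Console.WriteLine(IsDark(canvas));                      // True: use inverted cursors
        Console.WriteLine(Mix(Color.Black, Color.White, 3, 1)); // a dark grey, 1 part white in 4
    }
}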
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void AndNotByte() { var test = new SimpleBinaryOpTest__AndNotByte(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); // Validates passing a static member works test.RunClsVarScenario(); // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); // Validates passing a local works, using Load test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); // Validates passing the field of a local works test.RunLclFldScenario(); // Validates passing an instance member works test.RunFldScenario(); } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class SimpleBinaryOpTest__AndNotByte { private const int VectorSize = 32; private const int ElementCount = VectorSize / sizeof(Byte); private static Byte[] _data1 = new Byte[ElementCount]; private static Byte[] _data2 = new Byte[ElementCount]; private static Vector256<Byte> _clsVar1; private static Vector256<Byte> _clsVar2; private Vector256<Byte> _fld1; private Vector256<Byte> _fld2; private SimpleBinaryOpTest__DataTable<Byte> _dataTable; static SimpleBinaryOpTest__AndNotByte() { var random = new Random(); for (var i = 0; i < ElementCount; i++) { _data1[i] = (byte)(random.Next(0, byte.MaxValue)); _data2[i] = (byte)(random.Next(0, byte.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref _clsVar1), ref Unsafe.As<Byte, byte>(ref _data2[0]), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref _clsVar2), ref Unsafe.As<Byte, byte>(ref _data1[0]), VectorSize); } public SimpleBinaryOpTest__AndNotByte() { Succeeded = true; var random = new Random(); for (var i = 0; i < ElementCount; i++) { _data1[i] = (byte)(random.Next(0, byte.MaxValue)); _data2[i] = (byte)(random.Next(0, byte.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Byte>, byte>(ref _fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), VectorSize); Unsafe.CopyBlockUnaligned(ref 
Unsafe.As<Vector256<Byte>, byte>(ref _fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), VectorSize); for (var i = 0; i < ElementCount; i++) { _data1[i] = (byte)(random.Next(0, byte.MaxValue)); _data2[i] = (byte)(random.Next(0, byte.MaxValue)); } _dataTable = new SimpleBinaryOpTest__DataTable<Byte>(_data1, _data2, new Byte[ElementCount], VectorSize); } public bool IsSupported => Avx2.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { var result = Avx2.AndNot( Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector256<Byte>>(_dataTable.inArray2Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { var result = Avx2.AndNot( Avx.LoadVector256((Byte*)(_dataTable.inArray1Ptr)), Avx.LoadVector256((Byte*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { var result = Avx2.AndNot( Avx.LoadAlignedVector256((Byte*)(_dataTable.inArray1Ptr)), Avx.LoadAlignedVector256((Byte*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { var result = typeof(Avx2).GetMethod(nameof(Avx2.AndNot), new Type[] { typeof(Vector256<Byte>), typeof(Vector256<Byte>) }) .Invoke(null, new object[] { Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector256<Byte>>(_dataTable.inArray2Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Byte>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { var result = typeof(Avx2).GetMethod(nameof(Avx2.AndNot), new Type[] { typeof(Vector256<Byte>), typeof(Vector256<Byte>) }) .Invoke(null, new object[] { Avx.LoadVector256((Byte*)(_dataTable.inArray1Ptr)), Avx.LoadVector256((Byte*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Byte>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { var result = typeof(Avx2).GetMethod(nameof(Avx2.AndNot), new Type[] { typeof(Vector256<Byte>), typeof(Vector256<Byte>) }) .Invoke(null, new object[] { Avx.LoadAlignedVector256((Byte*)(_dataTable.inArray1Ptr)), Avx.LoadAlignedVector256((Byte*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Byte>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { var result = Avx2.AndNot( _clsVar1, _clsVar2 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } public void RunLclVarScenario_UnsafeRead() { var left = Unsafe.Read<Vector256<Byte>>(_dataTable.inArray1Ptr); var right = Unsafe.Read<Vector256<Byte>>(_dataTable.inArray2Ptr); var result = Avx2.AndNot(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { var left = Avx.LoadVector256((Byte*)(_dataTable.inArray1Ptr)); var right = Avx.LoadVector256((Byte*)(_dataTable.inArray2Ptr)); var result = Avx2.AndNot(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); 
ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { var left = Avx.LoadAlignedVector256((Byte*)(_dataTable.inArray1Ptr)); var right = Avx.LoadAlignedVector256((Byte*)(_dataTable.inArray2Ptr)); var result = Avx2.AndNot(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclFldScenario() { var test = new SimpleBinaryOpTest__AndNotByte(); var result = Avx2.AndNot(test._fld1, test._fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunFldScenario() { var result = Avx2.AndNot(_fld1, _fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr); } public void RunUnsupportedScenario() { Succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { Succeeded = true; } } private void ValidateResult(Vector256<Byte> left, Vector256<Byte> right, void* result, [CallerMemberName] string method = "") { Byte[] inArray1 = new Byte[ElementCount]; Byte[] inArray2 = new Byte[ElementCount]; Byte[] outArray = new Byte[ElementCount]; Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left); Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "") { Byte[] inArray1 = new Byte[ElementCount]; Byte[] inArray2 = new Byte[ElementCount]; Byte[] outArray = new Byte[ElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(Byte[] left, Byte[] right, Byte[] result, [CallerMemberName] string method = "") { if ((byte)(~left[0] & right[0]) != result[0]) { Succeeded = false; } else { for (var i = 1; i < left.Length; i++) { if ((byte)(~left[i] & right[i]) != result[i]) { Succeeded = false; break; } } } if (!Succeeded) { Console.WriteLine($"{nameof(Avx2)}.{nameof(Avx2.AndNot)}<Byte>: {method} failed:"); Console.WriteLine($" left: ({string.Join(", ", left)})"); Console.WriteLine($" right: ({string.Join(", ", right)})"); Console.WriteLine($" result: ({string.Join(", ", result)})"); Console.WriteLine(); } } } }
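// --- Illustrative sketch, not part of the generated test above -----------------------
// The ValidateResult overloads above check each output byte against (~left & right):
// Avx2.AndNot complements its FIRST operand and ANDs it with the second, per element.
// This standalone sketch shows the scalar reference next to the hardware-gated vector
// call; names are illustrative only.
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

internal static class AndNotSketch
{
    // Scalar reference for a single byte lane, matching the test's ValidateResult check.
    private static byte AndNotScalar(byte left, byte right) => (byte)(~left & right);

    private static void Main()
    {
        Console.WriteLine(AndNotScalar(0b1100_1010, 0b1111_0000)); // 48 (0b0011_0000)

        if (Avx2.IsSupported)
        {
            // Same operation across all 32 byte lanes of a 256-bit vector.
            Vector256<byte> left = Vector256.Create((byte)0b1100_1010);
            Vector256<byte> right = Vector256.Create((byte)0b1111_0000);
            Vector256<byte> result = Avx2.AndNot(left, right);
            Console.WriteLine(result.GetElement(0)); // 48 in every lane
        }
    }
}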
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using Microsoft.CodeAnalysis.CSharp.UnitTests; using Microsoft.CodeAnalysis.EditAndContinue; using Microsoft.CodeAnalysis.Emit; using Xunit; namespace Microsoft.CodeAnalysis.CSharp.EditAndContinue.UnitTests { public class LineEditTests : RudeEditTestBase { #region Methods [Fact] public void Method_Reorder1() { string src1 = @" class C { static void Foo() { Console.ReadLine(1); } static void Bar() { Console.ReadLine(2); } } "; string src2 = @" class C { static void Bar() { Console.ReadLine(2); } static void Foo() { Console.ReadLine(1); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(4, 9), new LineChange(9, 4) }, Array.Empty<string>()); } [Fact] public void Method_Reorder2() { string src1 = @" class Program { static void Main() { Foo(); Bar(); } static int Foo() { return 1; } static int Bar() { return 2; } }"; string src2 = @" class Program { static int Foo() { return 1; } static void Main() { Foo(); Bar(); } static int Bar() { return 2; } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(4, 9), new LineChange(10, 4) }, Array.Empty<string>()); } [Fact] public void Method_Update() { string src1 = @" class C { static void Bar() { Console.ReadLine(1); } } "; string src2 = @" class C { static void Bar() { Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), Array.Empty<string>()); } [Fact] public void Method_LineChange1() { string src1 = @" class C { static void Bar() { Console.ReadLine(2); } } "; string src2 = @" class C { static void Bar() { Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(4, 6) }, Array.Empty<string>()); } [Fact] public void Method_LineChange2() { string src1 = @" class C { static void Bar() { Console.ReadLine(2); } } "; string src2 = @" class C { static void Bar() { Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(4, 5) }, Array.Empty<string>()); } [Fact] public void Method_Recompile1() { string src1 = @" class C { static void Bar() { Console.ReadLine(2); } } "; string src2 = @" class C { static void Bar() { /**/Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static void Bar()" }); } [Fact] public void Method_Recompile2() { string src1 = @" class C { static void Bar() { Console.ReadLine(2); } } "; string src2 = @" class C { static void Bar() { Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static void Bar()" }); } [Fact] public void Method_Recompile3() { string src1 = @" class C { static void Bar() /*1*/ { Console.ReadLine(2); } } "; string src2 = @" class C { static void Bar() { /*2*/ Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static void Bar()" }); } [Fact] public void Method_Recompile4() { string src1 = @" class C { static void Bar() { int <N:0.0>a = 1</N:0.0>; int <N:0.1>b = 2</N:0.1>; <AS:0>System.Console.WriteLine(1);</AS:0> } } "; string src2 = @" class C { static void Bar() { int <N:0.0>a = 1</N:0.0>; int <N:0.1>b = 2</N:0.1>; <AS:0>System.Console.WriteLine(1);</AS:0> } }"; var edits = 
GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static void Bar()" }); var active = GetActiveStatements(src1, src2); var syntaxMap = GetSyntaxMap(src1, src2); edits.VerifySemantics( active, new[] { SemanticEdit(SemanticEditKind.Update, c => c.GetMember("C.Bar"), syntaxMap[0]) }); } [Fact] public void Method_Recompile5() { string src1 = @" class C { static void Bar() { } } "; string src2 = @" class C { /*--*/static void Bar() { } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static void Bar() { }" }); } [Fact] public void Method_RudeRecompile1() { string src1 = @" class C<T> { static void Bar() { /*edit*/ Console.ReadLine(2); } } "; string src2 = @" class C<T> { static void Bar() { Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static void Bar()" }, Diagnostic(RudeEditKind.GenericTypeTriviaUpdate, "\r\n ", FeaturesResources.method)); } [Fact] public void Method_RudeRecompile2() { string src1 = @" class C<T> { static void Bar() { Console.ReadLine(2); } } "; string src2 = @" class C<T> { static void Bar() { /*edit*/Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static void Bar()" }, Diagnostic(RudeEditKind.GenericTypeTriviaUpdate, "\r\n /*edit*/", FeaturesResources.method)); } [Fact] public void Method_RudeRecompile3() { string src1 = @" class C { static void Bar<T>() { /*edit*/Console.ReadLine(2); } } "; string src2 = @" class C { static void Bar<T>() { Console.ReadLine(2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static void Bar<T>()" }, Diagnostic(RudeEditKind.GenericMethodTriviaUpdate, "\r\n ", FeaturesResources.method)); } [Fact] public void Method_RudeRecompile4() { string src1 = @" class C { static async Task<int> Bar() { Console.ReadLine(2); } } "; string src2 = @" class C { static async Task<int> Bar() { Console.ReadLine( 2); } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "static async Task<int> Bar()" }); } #endregion #region Constructors [Fact] public void Constructor_Reorder() { string src1 = @" class C { public C(int a) { } public C(bool a) { } } "; string src2 = @" class C { public C(bool a) { } public C(int a) { } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(4, 8), new LineChange(8, 4) }, Array.Empty<string>()); } [Fact] public void Constructor_LineChange1() { string src1 = @" class C { public C(int a) : base() { } } "; string src2 = @" class C { public C(int a) : base() { } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(4, 5) }, Array.Empty<string>()); } [Fact] public void Constructor_Recompile1() { string src1 = @" class C { public C(int a) : base() { } } "; string src2 = @" class C { public C(int a) : base() { } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "public C(int a)" }); } [Fact] public void Constructor_Recompile2() { string src1 = @" class C { public C(int a) : base() { } } "; string src2 = @" class C { public C(int a) : base() { } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "public C(int a)" }); } [Fact] public void Constructor_RudeRecompile1() { string src1 = @" class C<T> 
{ public C(int a) : base() { } } "; string src2 = @" class C<T> { public C(int a) : base() { } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "public C(int a)" }, Diagnostic(RudeEditKind.GenericTypeTriviaUpdate, " ", FeaturesResources.constructor)); } #endregion #region Field Initializers [Fact] public void ConstantField() { string src1 = @" class C { const int Foo = 1; } "; string src2 = @" class C { const int Foo = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), Array.Empty<string>()); } [Fact] public void NoInitializer() { string src1 = @" class C { int Foo; } "; string src2 = @" class C { int Foo; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), Array.Empty<string>()); } [Fact] public void Field_Reorder() { string src1 = @" class C { static int Foo = 1; static int Bar = 2; } "; string src2 = @" class C { static int Bar = 2; static int Foo = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(3, 4), new LineChange(4, 3) }, Array.Empty<string>()); } [Fact] public void Field_LineChange1() { string src1 = @" class C { static int Foo = 1; } "; string src2 = @" class C { static int Foo = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(3, 6) }, Array.Empty<string>()); } [Fact] public void Field_LineChange2() { string src1 = @" class C { int Foo = 1, Bar = 2; } "; string src2 = @" class C { int Foo = 1, Bar = 2; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new LineChange[] { new LineChange(3, 4) }, Array.Empty<string>()); } [Fact] public void Field_LineChange3() { string src1 = @" class C { [A]static int Foo = 1, Bar = 2; } "; string src2 = @" class C { [A] static int Foo = 1, Bar = 2; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new LineChange[] { new LineChange(3, 4), new LineChange(3, 4) }, Array.Empty<string>()); } [Fact] public void Field_Recompile1a() { string src1 = @" class C { static int Foo = 1; } "; string src2 = @" class C { static int Foo = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "Foo = " }); } [Fact] public void Field_Recompile1b() { string src1 = @" class C { static int Foo = 1; } "; string src2 = @" class C { static int Foo = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "Foo " }); } [Fact] public void Field_Recompile1c() { string src1 = @" class C { static int Foo = 1; } "; string src2 = @" class C { static int Foo = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "Foo = 1" }); } [Fact] public void Field_Recompile1d() { string src1 = @" class C { static int Foo = 1; } "; string src2 = @" class C { static int Foo = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "Foo = 1" }); } [Fact] public void Field_Recompile1e() { string src1 = @" class C { static int Foo = 1; } "; string src2 = @" class C { static int Foo = 1 ; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "Foo = 1" }); } [Fact] public void Field_Recompile2() { string src1 = @" class C { static int Foo = 1 + 1; } "; string src2 = @" class C { static int Foo = 1 + 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), 
new string[] { "Foo = 1 + 1" }); } [Fact] public void Field_RudeRecompile2() { string src1 = @" class C<T> { static int Foo = 1 + 1; } "; string src2 = @" class C<T> { static int Foo = 1 + 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "Foo = 1 + 1" }, Diagnostic(RudeEditKind.GenericTypeTriviaUpdate, " ", FeaturesResources.field)); } #endregion #region Properties [Fact] public void Property1() { string src1 = @" class C { int P { get { return 1; } } } "; string src2 = @" class C { int P { get { return 1; } } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "get { return " }); } [Fact] public void Property2() { string src1 = @" class C { int P { get { return 1; } } } "; string src2 = @" class C { int P { get { return 1; } } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(3, 4) }, Array.Empty<string>()); } [Fact] public void Property3() { string src1 = @" class C { int P { get { return 1; } set { } } } "; string src2 = @" class C { int P { get { return 1; } set { } } }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(3, 4), new LineChange(3, 4) }, Array.Empty<string>()); } [Fact] public void Property_ExpressionBody1() { string src1 = @" class C { int P => 1; } "; string src2 = @" class C { int P => 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(3, 4) }, Array.Empty<string>()); } [Fact] public void Property_Initializer1() { string src1 = @" class C { int P { get; } = 1; } "; string src2 = @" class C { int P { get; } = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(3, 4) }, Array.Empty<string>()); } [Fact] public void Property_Initializer2() { string src1 = @" class C { int P { get; } = 1; } "; string src2 = @" class C { int P { get; } = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( new[] { new LineChange(3, 4) }, Array.Empty<string>()); } [Fact] public void Property_Initializer3() { string src1 = @" class C { int P { get; } = 1; } "; string src2 = @" class C { int P { get; } = 1; }"; var edits = GetTopEdits(src1, src2); edits.VerifyLineEdits( Array.Empty<LineChange>(), new string[] { "int P { get; } = 1;" }); } #endregion } }
/* * [The "BSD license"] * Copyright (c) 2013 Terence Parr * Copyright (c) 2013 Sam Harwell * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Text; using Antlr4.Runtime; using Antlr4.Runtime.Misc; using Antlr4.Runtime.Sharpen; namespace Antlr4.Runtime { public class UnbufferedTokenStream : ITokenStream { private ITokenSource _tokenSource; /// <summary>A moving window buffer of the data being scanned.</summary> /// <remarks> /// A moving window buffer of the data being scanned. While there's a marker, /// we keep adding to buffer. Otherwise, /// <see cref="Consume()">consume()</see> /// resets so /// we start filling at index 0 again. /// </remarks> protected internal IToken[] tokens; /// <summary> /// The number of tokens currently in /// <see cref="tokens">tokens</see> /// . /// <p>This is not the buffer capacity, that's /// <c>tokens.length</c> /// .</p> /// </summary> protected internal int n; /// <summary> /// 0..n-1 index into /// <see cref="tokens">tokens</see> /// of next token. /// <p>The /// <c>LT(1)</c> /// token is /// <c>tokens[p]</c> /// . If /// <c>p == n</c> /// , we are /// out of buffered tokens.</p> /// </summary> protected internal int p = 0; /// <summary> /// Count up with /// <see cref="Mark()">mark()</see> /// and down with /// <see cref="Release(int)">release()</see> /// . When we /// <c>release()</c> /// the last mark, /// <c>numMarkers</c> /// reaches 0 and we reset the buffer. Copy /// <c>tokens[p]..tokens[n-1]</c> /// to /// <c>tokens[0]..tokens[(n-1)-p]</c> /// . /// </summary> protected internal int numMarkers = 0; /// <summary> /// This is the /// <c>LT(-1)</c> /// token for the current position. /// </summary> protected internal IToken lastToken; /// <summary> /// When /// <c>numMarkers &gt; 0</c> /// , this is the /// <c>LT(-1)</c> /// token for the /// first token in /// <see cref="tokens"/> /// . Otherwise, this is /// <see langword="null"/> /// . /// </summary> protected internal IToken lastTokenBufferStart; /// <summary>Absolute token index.</summary> /// <remarks> /// Absolute token index. It's the index of the token about to be read via /// <c>LT(1)</c> /// . 
Goes from 0 to the number of tokens in the entire stream, /// although the stream size is unknown before the end is reached. /// <p>This value is used to set the token indexes if the stream provides tokens /// that implement /// <see cref="IWritableToken"/> /// .</p> /// </remarks> protected internal int currentTokenIndex = 0; public UnbufferedTokenStream(ITokenSource tokenSource) : this(tokenSource, 256) { } public UnbufferedTokenStream(ITokenSource tokenSource, int bufferSize) { this.TokenSource = tokenSource; this.tokens = new IToken[bufferSize]; n = 0; Fill(1); } // prime the pump public virtual IToken Get(int i) { int bufferStartIndex = GetBufferStartIndex(); if (i < bufferStartIndex || i >= bufferStartIndex + n) { throw new ArgumentOutOfRangeException("get(" + i + ") outside buffer: " + bufferStartIndex + ".." + (bufferStartIndex + n)); } return tokens[i - bufferStartIndex]; } public virtual IToken Lt(int i) { if (i == -1) { return lastToken; } Sync(i); int index = p + i - 1; if (index < 0) { throw new ArgumentOutOfRangeException("LT(" + i + ") gives negative index"); } if (index >= n) { System.Diagnostics.Debug.Assert(n > 0 && tokens[n - 1].Type == TokenConstants.Eof); return tokens[n - 1]; } return tokens[index]; } public virtual int La(int i) { return Lt(i).Type; } public virtual ITokenSource TokenSource { get { return _tokenSource; } set { _tokenSource = value; } } [return: NotNull] public virtual string GetText() { return string.Empty; } [return: NotNull] public virtual string GetText(RuleContext ctx) { return GetText(ctx.SourceInterval); } [return: NotNull] public virtual string GetText(IToken start, IToken stop) { if (start != null && stop != null) { return GetText(Interval.Of(start.TokenIndex, stop.TokenIndex)); } throw new NotSupportedException("The specified start and stop symbols are not supported."); } public virtual void Consume() { if (La(1) == TokenConstants.Eof) { throw new InvalidOperationException("cannot consume EOF"); } // buf always has at least tokens[p==0] in this method due to ctor lastToken = tokens[p]; // track last token for LT(-1) // if we're at last token and no markers, opportunity to flush buffer if (p == n - 1 && numMarkers == 0) { n = 0; p = -1; // p++ will leave this at 0 lastTokenBufferStart = lastToken; } p++; currentTokenIndex++; Sync(1); } /// <summary> /// Make sure we have 'need' elements from current position /// <see cref="p">p</see> /// . Last valid /// <c>p</c> /// index is /// <c>tokens.length-1</c> /// . /// <c>p+need-1</c> /// is the tokens index 'need' elements /// ahead. If we need 1 element, /// <c>(p+1-1)==p</c> /// must be less than /// <c>tokens.length</c> /// . /// </summary> protected internal virtual void Sync(int want) { int need = (p + want - 1) - n + 1; // how many more elements we need? if (need > 0) { Fill(need); } } /// <summary> /// Add /// <paramref name="n"/> /// elements to the buffer. Returns the number of tokens /// actually added to the buffer. If the return value is less than /// <paramref name="n"/> /// , /// then EOF was reached before /// <paramref name="n"/> /// tokens could be added. 
/// </summary> protected internal virtual int Fill(int n) { for (int i = 0; i < n; i++) { if (this.n > 0 && tokens[this.n - 1].Type == TokenConstants.Eof) { return i; } IToken t = TokenSource.NextToken(); Add(t); } return n; } protected internal virtual void Add(IToken t) { if (n >= tokens.Length) { tokens = Arrays.CopyOf(tokens, tokens.Length * 2); } if (t is IWritableToken) { ((IWritableToken)t).TokenIndex = GetBufferStartIndex() + n; } tokens[n++] = t; } /// <summary>Return a marker that we can release later.</summary> /// <remarks> /// Return a marker that we can release later. /// <p>The specific marker value used for this class allows for some level of /// protection against misuse where /// <c>seek()</c> /// is called on a mark or /// <c>release()</c> /// is called in the wrong order.</p> /// </remarks> public virtual int Mark() { if (numMarkers == 0) { lastTokenBufferStart = lastToken; } int mark = -numMarkers - 1; numMarkers++; return mark; } public virtual void Release(int marker) { int expectedMark = -numMarkers; if (marker != expectedMark) { throw new InvalidOperationException("release() called with an invalid marker."); } numMarkers--; if (numMarkers == 0) { // can we release buffer? if (p > 0) { // Copy tokens[p]..tokens[n-1] to tokens[0]..tokens[(n-1)-p], reset ptrs // p is last valid token; move nothing if p==n as we have no valid char System.Array.Copy(tokens, p, tokens, 0, n - p); // shift n-p tokens from p to 0 n = n - p; p = 0; } lastTokenBufferStart = lastToken; } } public virtual int Index { get { return currentTokenIndex; } } public virtual void Seek(int index) { // seek to absolute index if (index == currentTokenIndex) { return; } if (index > currentTokenIndex) { Sync(index - currentTokenIndex); index = Math.Min(index, GetBufferStartIndex() + n - 1); } int bufferStartIndex = GetBufferStartIndex(); int i = index - bufferStartIndex; if (i < 0) { throw new ArgumentException("cannot seek to negative index " + index); } else { if (i >= n) { throw new NotSupportedException("seek to index outside buffer: " + index + " not in " + bufferStartIndex + ".." + (bufferStartIndex + n)); } } p = i; currentTokenIndex = index; if (p == 0) { lastToken = lastTokenBufferStart; } else { lastToken = tokens[p - 1]; } } public virtual int Size { get { throw new NotSupportedException("Unbuffered stream cannot know its size"); } } public virtual string SourceName { get { return TokenSource.SourceName; } } [return: NotNull] public virtual string GetText(Interval interval) { int bufferStartIndex = GetBufferStartIndex(); int bufferStopIndex = bufferStartIndex + tokens.Length - 1; int start = interval.a; int stop = interval.b; if (start < bufferStartIndex || stop > bufferStopIndex) { throw new NotSupportedException("interval " + interval + " not in token buffer window: " + bufferStartIndex + ".." + bufferStopIndex); } int a = start - bufferStartIndex; int b = stop - bufferStartIndex; StringBuilder buf = new StringBuilder(); for (int i = a; i <= b; i++) { IToken t = tokens[i]; buf.Append(t.Text); } return buf.ToString(); } protected internal int GetBufferStartIndex() { return currentTokenIndex - p; } } }
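// --- Illustrative usage sketch, not part of the runtime source above -----------------
// The comments above describe the moving-window contract: while at least one mark is
// outstanding the stream keeps buffering, so Seek() can rewind; releasing the last mark
// lets the window slide and the buffer reset. This sketch exercises Mark/Consume/Seek/
// Release on a tiny token list. ListTokenSource and CommonToken are assumed to come from
// the same Antlr4.Runtime package; any ITokenSource would do.
using System;
using System.Collections.Generic;
using Antlr4.Runtime;

internal static class UnbufferedTokenStreamSketch
{
    private static void Main()
    {
        IList<IToken> tokens = new List<IToken>
        {
            new CommonToken(1, "a"),
            new CommonToken(1, "b"),
            new CommonToken(1, "c"),
        };
        var stream = new UnbufferedTokenStream(new ListTokenSource(tokens));

        int marker = stream.Mark();           // from here on the buffer must be retained
        int start = stream.Index;
        stream.Consume();                      // past "a"
        stream.Consume();                      // past "b"
        stream.Seek(start);                    // legal only because the mark kept the tokens
        Console.WriteLine(stream.Lt(1).Text);  // "a" again
        stream.Release(marker);                // last mark released: window may slide again
    }
}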
using Abp.Domain.Entities; using Abp.Domain.Entities.Auditing; using Abp.Domain.Repositories; using Abp.EntityHistory; using Abp.Events.Bus.Entities; using Abp.Extensions; using Abp.Json; using Abp.Threading; using Abp.Timing; using Abp.Zero.SampleApp.EntityHistory; using Castle.MicroKernel.Registration; using NSubstitute; using Shouldly; using System; using System.Collections.Generic; using System.Data.Entity; using System.Data.Entity.Infrastructure; using System.Linq; using System.Text; using System.Threading; using Abp.Application.Editions; using Abp.Application.Features; using Abp.Authorization.Roles; using Abp.Zero.SampleApp.TPH; using Xunit; namespace Abp.Zero.SampleApp.Tests.EntityHistory { public class SimpleEntityHistory_Test : SampleAppTestBase { private readonly IRepository<Advertisement> _advertisementRepository; private readonly IRepository<Blog> _blogRepository; private readonly IRepository<Post, Guid> _postRepository; private readonly IRepository<Comment> _commentRepository; private readonly IRepository<Student> _studentRepository; private readonly IRepository<Foo> _fooRepository; private IEntityHistoryStore _entityHistoryStore; public SimpleEntityHistory_Test() { _advertisementRepository = Resolve<IRepository<Advertisement>>(); _blogRepository = Resolve<IRepository<Blog>>(); _postRepository = Resolve<IRepository<Post, Guid>>(); _commentRepository = Resolve<IRepository<Comment>>(); _studentRepository = Resolve<IRepository<Student>>(); _fooRepository = Resolve<IRepository<Foo>>(); var user = GetDefaultTenantAdmin(); AbpSession.TenantId = user.TenantId; AbpSession.UserId = user.Id; Resolve<IEntityHistoryConfiguration>().IsEnabledForAnonymousUsers = true; } protected override void PreInitialize() { base.PreInitialize(); _entityHistoryStore = Substitute.For<IEntityHistoryStore>(); LocalIocManager.IocContainer.Register( Component.For<IEntityHistoryStore>().Instance(_entityHistoryStore).LifestyleSingleton() ); } #region CASES WRITE HISTORY [Fact] public void Should_Write_History_For_Tracked_Entities_Create() { /* Advertisement does not have Audited attribute. */ Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Advertisement)); int? 
advertisementId = null; WithUnitOfWork(() => { var advertisement = new Advertisement {Banner = "tracked-advertisement"}; advertisementId = _advertisementRepository.InsertAndGetId(advertisement); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Advertisement).FullName); ((DateTime?)entityChange.ChangeTime).ShouldNotBe(null); entityChange.ChangeType.ShouldBe(EntityChangeType.Created); entityChange.EntityId.ShouldBe(advertisementId.ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(1); var propertyChange1 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Advertisement.Banner)); propertyChange1.OriginalValue.ShouldBeNull(); propertyChange1.NewValue.ShouldNotBeNull(); // Check "who did this change" s.ImpersonatorTenantId.ShouldBe(AbpSession.ImpersonatorTenantId); s.ImpersonatorUserId.ShouldBe(AbpSession.ImpersonatorUserId); s.TenantId.ShouldBe(AbpSession.TenantId); s.UserId.ShouldBe(AbpSession.UserId); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_Tracked_Entities_Create_To_Database() { // Forward calls from substitute to implementation var entityHistoryStore = Resolve<EntityHistoryStore>(); _entityHistoryStore.When(x => x.SaveAsync(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.SaveAsync(callback.Arg<EntityChangeSet>())); _entityHistoryStore.When(x => x.Save(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.Save(callback.Arg<EntityChangeSet>())); UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(0); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(0); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(0); }); /* Advertisement does not have Audited attribute. 
*/ Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Advertisement)); var justNow = Clock.Now; Thread.Sleep(1); WithUnitOfWork(() => { _advertisementRepository.InsertAndGetId(new Advertisement {Banner = "tracked-advertisement"}); }); UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Single().CreationTime.ShouldBeGreaterThan(justNow); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(1); }); } [Fact] public void Should_Write_History_For_TPH_Tracked_Entities_Create() { Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Student)); var student = new Student { Name = "TestName", IdCard = "TestIdCard", Address = "TestAddress", Grade = 1 }; _studentRepository.Insert(student); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Student).FullName); ((DateTime?)entityChange.ChangeTime).ShouldNotBe(null); entityChange.ChangeType.ShouldBe(EntityChangeType.Created); entityChange.EntityId.ShouldBe(student.Id.ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(4); //Name,IdCard,Address,Grade var propertyChange1 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Student.Name)); propertyChange1.OriginalValue.ShouldBeNull(); propertyChange1.NewValue.ShouldNotBeNull(); var propertyChange2 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Student.IdCard)); propertyChange2.OriginalValue.ShouldBeNull(); propertyChange2.NewValue.ShouldNotBeNull(); var propertyChange3 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Student.Address)); propertyChange3.OriginalValue.ShouldBeNull(); propertyChange3.NewValue.ShouldNotBeNull(); var propertyChange4 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Student.Grade)); propertyChange4.OriginalValue.ShouldBeNull(); propertyChange4.NewValue.ShouldNotBeNull(); // Check "who did this change" s.ImpersonatorTenantId.ShouldBe(AbpSession.ImpersonatorTenantId); s.ImpersonatorUserId.ShouldBe(AbpSession.ImpersonatorUserId); s.TenantId.ShouldBe(AbpSession.TenantId); s.UserId.ShouldBe(AbpSession.UserId); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_TPH_Tracked_Entities_Create_To_Database() { // Forward calls from substitute to implementation var entityHistoryStore = Resolve<EntityHistoryStore>(); _entityHistoryStore.When(x => x.SaveAsync(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.SaveAsync(callback.Arg<EntityChangeSet>())); _entityHistoryStore.When(x => x.Save(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.Save(callback.Arg<EntityChangeSet>())); UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(0); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(0); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(0); }); Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Student)); var justNow = Clock.Now; Thread.Sleep(1); var student = new Student() { Name = "TestName", IdCard = "TestIdCard", Address = "TestAddress", Grade = 1 }; _studentRepository.Insert(student); UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Count(e => 
e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Single().CreationTime.ShouldBeGreaterThan(justNow); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(4); //Name,IdCard,Address,Grade }); } [Fact] public void Should_Write_History_For_Tracked_Entities_Update() { /* Advertisement does not have Audited attribute. */ Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Advertisement)); WithUnitOfWork(() => { var advertisement1 = _advertisementRepository.Single(a => a.Banner == "test-advertisement-1"); advertisement1.Banner = "test-advertisement-1-updated"; _advertisementRepository.Update(advertisement1); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Advertisement).FullName); entityChange.ChangeType.ShouldBe(EntityChangeType.Updated); entityChange.EntityId.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity.As<IEntity>().Id .ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(1); var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Advertisement.Banner)); propertyChange.NewValue.ShouldBe("test-advertisement-1-updated".ToJsonString()); propertyChange.OriginalValue.ShouldBe("test-advertisement-1".ToJsonString()); propertyChange.PropertyTypeFullName.ShouldBe(typeof(Advertisement) .GetProperty(nameof(Advertisement.Banner)).PropertyType.FullName); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_Audited_Entities_Create() { /* Blog has Audited attribute. */ var blog2Id = CreateBlogAndGetId(); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName); entityChange.ChangeTime.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity .As<IHasCreationTime>().CreationTime); entityChange.ChangeType.ShouldBe(EntityChangeType.Created); entityChange.EntityId.ShouldBe(blog2Id.ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(3); var propertyChange1 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.Url)); propertyChange1.OriginalValue.ShouldBeNull(); propertyChange1.NewValue.ShouldNotBeNull(); var propertyChange2 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.More)); propertyChange2.OriginalValue.ShouldBeNull(); propertyChange2.NewValue.ShouldNotBeNull(); var propertyChange3 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.CreationTime)); propertyChange3.OriginalValue.ShouldBeNull(); propertyChange3.NewValue.ShouldNotBeNull(); // Check "who did this change" s.ImpersonatorTenantId.ShouldBe(AbpSession.ImpersonatorTenantId); s.ImpersonatorUserId.ShouldBe(AbpSession.ImpersonatorUserId); s.TenantId.ShouldBe(AbpSession.TenantId); s.UserId.ShouldBe(AbpSession.UserId); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_Audited_Entities_Create_To_Database() { // Forward calls from substitute to implementation var entityHistoryStore = Resolve<EntityHistoryStore>(); _entityHistoryStore.When(x => x.SaveAsync(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.SaveAsync(callback.Arg<EntityChangeSet>())); _entityHistoryStore.When(x => x.Save(Arg.Any<EntityChangeSet>())) .Do(callback => 
entityHistoryStore.Save(callback.Arg<EntityChangeSet>())); UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(0); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(0); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(0); }); var justNow = Clock.Now; Thread.Sleep(1); var blog2Id = CreateBlogAndGetId(); UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Single().CreationTime.ShouldBeGreaterThan(justNow); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(3); }); } [Fact] public void Should_Write_History_For_Audited_Entities_Update() { /* Blog has Audited attribute. */ var newValue = "http://testblog1-changed.myblogs.com"; var originalValue = UpdateBlogUrlAndGetOriginalValue(newValue); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName); entityChange.ChangeType.ShouldBe(EntityChangeType.Updated); entityChange.EntityId.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity.As<IEntity>().Id .ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(1); var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.Url)); propertyChange.NewValue.ShouldBe(newValue.ToJsonString()); propertyChange.OriginalValue.ShouldBe(originalValue.ToJsonString()); propertyChange.PropertyTypeFullName.ShouldBe(typeof(Blog).GetProperty(nameof(Blog.Url)).PropertyType .FullName); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_Audited_Entities_Update_Only_Modified_Properties() { var originalValue = "http://testblog2.myblogs.com"; var newValue = "http://testblog2-changed.myblogs.com"; WithUnitOfWork(() => { var blog2 = _blogRepository.Single(b => b.Url == originalValue); // Update only the Url of the Blog blog2.ChangeUrl(newValue); _blogRepository.Update(blog2); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName); entityChange.ChangeType.ShouldBe(EntityChangeType.Updated); entityChange.EntityId.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity.As<IEntity>().Id .ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(1); var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.Url)); propertyChange.NewValue.ShouldBe(newValue.ToJsonString()); propertyChange.OriginalValue.ShouldBe(originalValue.ToJsonString()); propertyChange.PropertyTypeFullName.ShouldBe(typeof(Blog).GetProperty(nameof(Blog.Url)).PropertyType .FullName); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_Audited_Entities_Update_Complex() { /* Blog has Audited attribute. 
*/ int blog1Id = 0; var newValue = new BlogEx {BloggerName = "blogger-2"}; BlogEx originalValue = null; WithUnitOfWork(() => { var blog1 = _blogRepository.Single(b => b.More.BloggerName == "blogger-1"); blog1Id = blog1.Id; originalValue = new BlogEx {BloggerName = blog1.More.BloggerName}; blog1.More.BloggerName = newValue.BloggerName; _blogRepository.Update(blog1); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName); entityChange.ChangeType.ShouldBe(EntityChangeType.Updated); entityChange.EntityId.ShouldBe(blog1Id.ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(1); var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.More)); propertyChange.NewValue.ShouldBe(newValue.ToJsonString()); propertyChange.OriginalValue.ShouldBe(originalValue.ToJsonString()); propertyChange.PropertyTypeFullName.ShouldBe(typeof(Blog).GetProperty(nameof(Blog.More)).PropertyType .FullName); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_Audited_Property_Foreign_Key() { /* Post.BlogId has Audited attribute. */ var blogId = CreateBlogAndGetId(); Guid post1Id = Guid.Empty; WithUnitOfWork(() => { var blog2 = _blogRepository.Single(b => b.Id == 2); var post1 = _postRepository.Single(p => p.Body == "test-post-1-body"); post1Id = post1.Id; // Change foreign key by assigning navigation property post1.Blog = blog2; _postRepository.Update(post1); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Post).FullName); entityChange.ChangeType.ShouldBe(EntityChangeType.Updated); entityChange.EntityId.ShouldBe(post1Id.ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(1); var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Post.BlogId)); propertyChange.NewValue.ShouldBe("2"); propertyChange.OriginalValue.ShouldBe("1"); propertyChange.PropertyTypeFullName.ShouldBe(typeof(Post).GetProperty(nameof(Post.BlogId)).PropertyType .FullName); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_Audited_Property_Foreign_Key_Collection() { WithUnitOfWork(() => { var blog1 = _blogRepository.Single(b => b.Name == "test-blog-1"); var post10 = new Post {Blog = blog1, Title = "test-post-10-title", Body = "test-post-10-body"}; // Change navigation property by adding into collection blog1.Posts.Add(post10); _blogRepository.Update(blog1); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(2); /* Post is not in Configuration.Selectors */ /* Post.Blog has Audited attribute */ var entityChangePost = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Post).FullName); entityChangePost.ChangeType.ShouldBe(EntityChangeType.Created); entityChangePost.PropertyChanges.Count.ShouldBe(1); var propertyChange1 = entityChangePost.PropertyChanges.Single(pc => pc.PropertyName == nameof(Post.BlogId)); propertyChange1.OriginalValue.ShouldBeNull(); propertyChange1.NewValue.ShouldNotBeNull(); /* Blog has Audited attribute. 
*/ var entityChangeBlog = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName); entityChangeBlog.ChangeType.ShouldBe(EntityChangeType.Updated); entityChangeBlog.PropertyChanges.Count.ShouldBe(0); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_Audited_Property_Foreign_Key_Shadow() { /* Comment has Audited attribute. */ var post1KeyValue = new Dictionary<string, object>(); var post2KeyValue = new Dictionary<string, object>(); WithUnitOfWork(() => { var post2 = _postRepository.Single(p => p.Body == "test-post-2-body"); post2KeyValue.Add("Id", post2.Id); var comment1 = _commentRepository.Single(c => c.Content == "test-comment-1-content"); post1KeyValue.Add("Id", comment1.Post.Id); // Change foreign key by assigning navigation property comment1.Post = post2; _commentRepository.Update(comment1); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Comment).FullName); entityChange.PropertyChanges.Count.ShouldBe(1); var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Comment.Post)); propertyChange.NewValue.ShouldBe(post2KeyValue.ToJsonString()); propertyChange.OriginalValue.ShouldBe(post1KeyValue.ToJsonString()); propertyChange.PropertyTypeFullName.ShouldBe(typeof(Comment).GetProperty(nameof(Comment.Post)) .PropertyType.FullName); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_But_Not_For_Property_If_Disabled_History_Tracking() { /* Blog.Name has DisableAuditing attribute. */ WithUnitOfWork(() => { var blog1 = _blogRepository.Single(b => b.Name == "test-blog-1"); blog1.Name = null; _blogRepository.Update(blog1); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName); entityChange.ChangeType.ShouldBe(EntityChangeType.Updated); entityChange.EntityId.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity.As<IEntity>().Id .ToJsonString()); entityChange.PropertyChanges.Count.ShouldBe(0); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_TPH_Tracked_Entities_With_One_To_Many_Relationship_Create() { var studentId = CreateStudentAndGetId(); Resolve<IEntityHistoryConfiguration>().Selectors .Add("Selected", typeof(Student), typeof(StudentLectureNote)); _entityHistoryStore.ClearReceivedCalls(); WithUnitOfWork(() => { var student = _studentRepository.Get(studentId); var lectureNote = new StudentLectureNote() { Student = student, CourseName = "Course1", Note = 100 }; student.LectureNotes.Add(lectureNote); _studentRepository.Update(student); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(StudentLectureNote).FullName); ((DateTime?)entityChange.ChangeTime).ShouldNotBe(null); entityChange.ChangeType.ShouldBe(EntityChangeType.Created); entityChange.PropertyChanges.Count.ShouldBe(3); entityChange.PropertyChanges.Single(p => p.PropertyName == nameof(StudentLectureNote.StudentId)) .NewValue.ShouldBe(studentId.ToString()); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] 
public void Should_Write_History_For_TPH_Tracked_Entities_With_One_To_One_Relationship_Changes_Create() { var studentId = CreateStudentAndGetId(); Resolve<IEntityHistoryConfiguration>().Selectors .Add("Selected", typeof(Student), typeof(CitizenshipInformation)); _entityHistoryStore.ClearReceivedCalls(); WithUnitOfWork(() => { var student = _studentRepository.Get(studentId); var citizenshipInformation = new CitizenshipInformation() { Student = student, CitizenShipId = "123qwe" }; student.CitizenshipInformation = citizenshipInformation; _studentRepository.Update(student); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(CitizenshipInformation).FullName); ((DateTime?)entityChange.ChangeTime).ShouldNotBe(null); entityChange.ChangeType.ShouldBe(EntityChangeType.Created); entityChange.PropertyChanges.Count.ShouldBe(1); entityChange.PropertyChanges.Single(p => p.PropertyName == nameof(CitizenshipInformation.CitizenShipId)) .NewValue.ShouldBe("\"123qwe\""); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } [Fact] public void Should_Write_History_For_TPH_Tracked_Entities_With_One_To_One_Relationship_Changes_Update() { var studentId = CreateStudentWithCitizenshipAndGetId(); Resolve<IEntityHistoryConfiguration>().Selectors .Add("Selected", typeof(Student), typeof(CitizenshipInformation)); _entityHistoryStore.ClearReceivedCalls(); WithUnitOfWork(() => { var student = _studentRepository.GetAll().Include(x => x.CitizenshipInformation) .Single(x => x.Id == studentId); student.CitizenshipInformation.CitizenShipId = "qwe123"; _studentRepository.Update(student); }); Predicate<EntityChangeSet> predicate = s => { s.EntityChanges.Count.ShouldBe(1); var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(CitizenshipInformation).FullName); ((DateTime?)entityChange.ChangeTime).ShouldNotBe(null); entityChange.ChangeType.ShouldBe(EntityChangeType.Updated); entityChange.PropertyChanges.Count.ShouldBe(1); var idChange = entityChange.PropertyChanges.Single(p => p.PropertyName == nameof(CitizenshipInformation.CitizenShipId)); idChange.OriginalValue.ShouldBe("\"123qwe\""); idChange.NewValue.ShouldBe("\"qwe123\""); return true; }; _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s))); } private int CreateStudentAndGetId() { var student = new Student() { Name = "TestName", IdCard = "TestIdCard", Address = "TestAddress", Grade = 1, }; return _studentRepository.InsertAndGetId(student); } private int CreateStudentWithCitizenshipAndGetId() { var student = new Student() { Name = "TestName", IdCard = "TestIdCard", Address = "TestAddress", Grade = 1, CitizenshipInformation = new CitizenshipInformation() { CitizenShipId = "123qwe" } }; return _studentRepository.InsertAndGetId(student); } [Fact] public void Should_Not_Save_Empty_PropertyChanges() { // Arrange // Forward calls from substitute to implementation var entityHistoryStore = Resolve<EntityHistoryStore>(); _entityHistoryStore.When(x => x.SaveAsync(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.SaveAsync(callback.Arg<EntityChangeSet>())); _entityHistoryStore.When(x => x.Save(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.Save(callback.Arg<EntityChangeSet>())); // Act int itemId = 0; WithUnitOfWork(() => { var foo = new Foo { Audited = "s1" }; itemId = _fooRepository.InsertAndGetId(foo); }); UsingDbContext((context) => { 
context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(1); }); WithUnitOfWork(() => { var foo = _fooRepository.Get(itemId); foo.NonAudited = "s2"; _fooRepository.Update(foo); }); // Assert UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(1); }); } [Fact] public void Should_Work_Properly_With_Large_Data() { var entityHistoryStore = Resolve<EntityHistoryStore>(); _entityHistoryStore.When(x => x.SaveAsync(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.SaveAsync(callback.Arg<EntityChangeSet>())); _entityHistoryStore.When(x => x.Save(Arg.Any<EntityChangeSet>())) .Do(callback => entityHistoryStore.Save(callback.Arg<EntityChangeSet>())); StringBuilder stringBuilder = new StringBuilder(); for (int i = 0; i <= EntityPropertyChange.MaxValueLength+1; i++) { stringBuilder.Append("a"); } var bigStringWithTruncateWithPostfix = stringBuilder.ToString().ToJsonString().TruncateWithPostfix(EntityPropertyChange.MaxValueLength); // Act int itemId = 0; WithUnitOfWork(() => { var foo = new Foo { Audited = stringBuilder.ToString() }; itemId = _fooRepository.InsertAndGetId(foo); }); UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(1); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(1); var change = context.EntityPropertyChanges.Single(); change.OriginalValue.ShouldBeNull(); change.NewValue.ShouldBe(bigStringWithTruncateWithPostfix); }); WithUnitOfWork(() => { var foo = _fooRepository.Get(itemId); foo.Audited = stringBuilder.ToString() + "bbbbbbbbbbbbbb"; _fooRepository.Update(foo); }); UsingDbContext((context) => { context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(2); context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(2); context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(2); var changes = context.EntityPropertyChanges.ToList(); changes[0].OriginalValue.ShouldBeNull(); changes[0].NewValue.ShouldBe(bigStringWithTruncateWithPostfix); //even though the original value and new value are equal, changes will be detected on entity //(the actual values have been truncated because they are too large to be stored. truncated values are equal but actual values are not) changes[1].OriginalValue.ShouldBe(bigStringWithTruncateWithPostfix); changes[1].NewValue.ShouldBe(bigStringWithTruncateWithPostfix); //hashes must be different changes[1].NewValueHash.ShouldNotBe(changes[1].OriginalValueHash); }); } #endregion #region CASES DON'T WRITE HISTORY [Fact] public void Should_Not_Write_History_If_Disabled() { Resolve<IEntityHistoryConfiguration>().IsEnabled = false; /* Blog has Audited attribute. */ var newValue = "http://testblog1-changed.myblogs.com"; var originalValue = UpdateBlogUrlAndGetOriginalValue(newValue); _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_If_Not_Audited_And_Not_Selected() { /* Advertisement does not have Audited attribute. 
*/ Resolve<IEntityHistoryConfiguration>().Selectors.Clear(); WithUnitOfWork(() => { _advertisementRepository.Insert(new Advertisement { Banner = "not-selected-advertisement" }); }); _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_If_Ignored() { Resolve<IEntityHistoryConfiguration>().IgnoredTypes.Add(typeof(Blog)); /* Blog has Audited attribute. */ var newValue = "http://testblog1-changed.myblogs.com"; var originalValue = UpdateBlogUrlAndGetOriginalValue(newValue); _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_If_Selected_But_Ignored() { Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Blog)); Resolve<IEntityHistoryConfiguration>().IgnoredTypes.Add(typeof(Blog)); /* Blog has Audited attribute. */ var newValue = "http://testblog1-changed.myblogs.com"; var originalValue = UpdateBlogUrlAndGetOriginalValue(newValue); _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_If_Property_Has_No_Audited_Attribute() { /* Advertisement.Banner does not have Audited attribute. */ WithUnitOfWork(() => { var advertisement1 = _advertisementRepository.Single(a => a.Banner == "test-advertisement-1"); advertisement1.Banner = null; _advertisementRepository.Update(advertisement1); }); _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_If_Invalid_Entity_Has_Property_With_Audited_Attribute_Created() { //Act UsingDbContext((context) => { context.Categories.Add(new Category {DisplayName = "My Category"}); context.SaveChanges(); }); //Assert _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_If_Invalid_Entity_Has_Property_With_Audited_Attribute_Updated() { //Arrange UsingDbContext((context) => { context.Categories.Add(new Category {DisplayName = "My Category"}); context.SaveChanges(); }); _entityHistoryStore.ClearReceivedCalls(); //Act UsingDbContext((context) => { var category = context.Categories.Single(c => c.DisplayName == "My Category"); category.DisplayName = "Invalid Category"; context.SaveChanges(); }); //Assert _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_If_Invalid_Entity_Has_Property_With_Audited_Attribute_Deleted() { //Arrange UsingDbContext((context) => { context.Categories.Add(new Category {DisplayName = "My Category"}); context.SaveChanges(); }); _entityHistoryStore.ClearReceivedCalls(); //Act UsingDbContext((context) => { var category = context.Categories.Single(c => c.DisplayName == "My Category"); context.Categories.Remove(category); context.SaveChanges(); }); //Assert _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_For_Audited_Entity_By_Default() { //Arrange UsingDbContext((context) => { context.Countries.Add(new Country {CountryCode = "My Country"}); context.SaveChanges(); }); //Assert _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } [Fact] public void Should_Not_Write_History_For_Not_Audited_Entities_Shadow_Property() { // PermissionSetting has Discriminator column (shadow property) for RolePermissionSetting //Arrange UsingDbContext((context) => { var role = context.Roles.FirstOrDefault(); role.ShouldNotBeNull(); context.RolePermissions.Add(new RolePermissionSetting() { Name = "Test", RoleId 
= role.Id }); context.SaveChanges(); }); //Assert _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>()); } #endregion private int CreateBlogAndGetId() { int blog2Id = 0; WithUnitOfWork(() => { var blog2 = new Blog("test-blog-2", "http://testblog2.myblogs.com", "blogger-2"); blog2Id = _blogRepository.InsertAndGetId(blog2); }); return blog2Id; } private string UpdateBlogUrlAndGetOriginalValue(string newValue) { string originalValue = null; WithUnitOfWork(() => { var blog1 = _blogRepository.Single(b => b.Name == "test-blog-1"); originalValue = blog1.Url; blog1.ChangeUrl(newValue); _blogRepository.Update(blog1); }); return originalValue; } } #region Helpers internal static class IEnumerableExtensions { internal static EntityPropertyChange FirstOrDefault(this IEnumerable<EntityPropertyChange> enumerable) { var enumerator = enumerable.GetEnumerator(); enumerator.MoveNext(); return enumerator.Current; } } #endregion }
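// --- Illustrative sketch, not part of the test class above ---
// The tests depend on an NSubstitute fake for IEntityHistoryStore that the
// (not shown) test base class registers; the helper names below are
// hypothetical, only the NSubstitute and Abp.EntityHistory APIs are real.
using System;
using Abp.EntityHistory;
using NSubstitute;

internal static class EntityHistoryStoreFakeSketch
{
    // Creates the substitute that records every Save/SaveAsync call so the
    // tests can inspect the EntityChangeSet produced by the interceptor.
    internal static IEntityHistoryStore CreateFakeStore()
    {
        return Substitute.For<IEntityHistoryStore>();
    }

    // Same predicate-based assertion style used throughout the tests above.
    internal static void AssertSingleChangeSaved(IEntityHistoryStore store)
    {
        Predicate<EntityChangeSet> predicate = s => s.EntityChanges.Count == 1;
        store.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s)));
    }

    // Used by the "don't write history" cases.
    internal static void AssertNothingSaved(IEntityHistoryStore store)
    {
        store.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
    }
}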
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ using System; using Lucene.Net.Util; namespace Lucene.Net.Support { /// <summary> /// Mimics Java's Character class. /// </summary> public class Character { private const char charNull = '\0'; private const char charZero = '0'; private const char charA = 'a'; public const int MAX_RADIX = 36; public const int MIN_RADIX = 2; public const int MAX_CODE_POINT = 0x10FFFF; public const int MIN_CODE_POINT = 0x000000; public const char MAX_SURROGATE = '\uDFFF'; public const char MIN_SURROGATE = '\uD800'; public const char MIN_LOW_SURROGATE = '\uDC00'; public const char MAX_LOW_SURROGATE = '\uDFFF'; public const char MIN_HIGH_SURROGATE = '\uD800'; public const char MAX_HIGH_SURROGATE = '\uDBFF'; public static int MIN_SUPPLEMENTARY_CODE_POINT = 0x010000; /// <summary> /// /// </summary> /// <param name="digit"></param> /// <param name="radix"></param> /// <returns></returns> public static char ForDigit(int digit, int radix) { // if radix or digit is out of range, // return the null character. if (radix < Character.MIN_RADIX) return charNull; if (radix > Character.MAX_RADIX) return charNull; if (digit < 0) return charNull; if (digit >= radix) return charNull; // if digit is less than 10, // return '0' plus digit if (digit < 10) return (char)((int)charZero + digit); // otherwise, return 'a' plus digit. return (char)((int)charA + digit - 10); } public static int ToChars(int codePoint, char[] dst, int dstIndex) { var converted = UnicodeUtil.ToCharArray(new[] {codePoint}, 0, 1); Array.Copy(converted, 0, dst, dstIndex, converted.Length); return converted.Length; } public static char[] ToChars(int codePoint) { return UnicodeUtil.ToCharArray(new[] {codePoint}, 0, 1); } public static int ToCodePoint(char high, char low) { // Optimized form of: // return ((high - MIN_HIGH_SURROGATE) << 10) // + (low - MIN_LOW_SURROGATE) // + MIN_SUPPLEMENTARY_CODE_POINT; return ((high << 10) + low) + (MIN_SUPPLEMENTARY_CODE_POINT - (MIN_HIGH_SURROGATE << 10) - MIN_LOW_SURROGATE); } public static int ToLowerCase(int codePoint) { // LUCENENET TODO do we really need this? what's wrong with char.ToLower() ? var str = UnicodeUtil.NewString(new[] {codePoint}, 0, 1); str = str.ToLower(); return CodePointAt(str, 0); } public static int CharCount(int codePoint) { // A given codepoint can be represented in .NET either by 1 char (up to UTF16), // or by if it's a UTF32 codepoint, in which case the current char will be a surrogate return codePoint >= MIN_SUPPLEMENTARY_CODE_POINT ? 
2 : 1; } public static int CodePointCount(string seq, int beginIndex, int endIndex) { int length = seq.Length; if (beginIndex < 0 || endIndex > length || beginIndex > endIndex) { throw new IndexOutOfRangeException(); } int n = endIndex - beginIndex; for (int i = beginIndex; i < endIndex;) { if (char.IsHighSurrogate(seq[i++]) && i < endIndex && char.IsLowSurrogate(seq[i])) { n--; i++; } } return n; } public static int CodePointAt(string seq, int index) { char c1 = seq[index++]; if (char.IsHighSurrogate(c1)) { if (index < seq.Length) { char c2 = seq[index]; if (char.IsLowSurrogate(c2)) { return ToCodePoint(c1, c2); } } } return c1; } public static int CodePointAt(ICharSequence seq, int index) { char c1 = seq.CharAt(index++); if (char.IsHighSurrogate(c1)) { if (index < seq.Length) { char c2 = seq.CharAt(index); if (char.IsLowSurrogate(c2)) { return ToCodePoint(c1, c2); } } } return c1; } public static int CodePointAt(char[] a, int index, int limit) { if (index >= limit || limit < 0 || limit > a.Length) { throw new IndexOutOfRangeException(); } return CodePointAtImpl(a, index, limit); } // throws ArrayIndexOutofBoundsException if index out of bounds static int CodePointAtImpl(char[] a, int index, int limit) { char c1 = a[index++]; if (char.IsHighSurrogate(c1)) { if (index < limit) { char c2 = a[index]; if (char.IsLowSurrogate(c2)) { return ToCodePoint(c1, c2); } } } return c1; } } }
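// --- Usage sketch for the Character helpers above (illustrative only) ---
// Shows a supplementary code point being split into a surrogate pair and
// recombined; only members defined in Lucene.Net.Support.Character are used.
using System;
using Lucene.Net.Support;

public static class CharacterExample
{
    public static void Main()
    {
        int codePoint = 0x1F600;                    // outside the BMP, needs two chars
        char[] pair = Character.ToChars(codePoint); // { high surrogate, low surrogate }

        Console.WriteLine(Character.CharCount(codePoint));           // 2
        Console.WriteLine(Character.ToCodePoint(pair[0], pair[1]));  // 128512 (0x1F600)
        Console.WriteLine(Character.CodePointAt(new string(pair), 0) == codePoint); // True
        Console.WriteLine(Character.ForDigit(11, 16));               // 'b'
    }
}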
// ------------------------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information. // ------------------------------------------------------------------------------ // **NOTE** This file was generated by a tool and any changes will be overwritten. namespace Microsoft.Graph { using System; using System.Collections.Generic; using System.IO; using System.Net.Http; using System.Threading; using System.Linq.Expressions; /// <summary> /// The type DirectoryRoleTemplateRequest. /// </summary> public partial class DirectoryRoleTemplateRequest : BaseRequest, IDirectoryRoleTemplateRequest { /// <summary> /// Constructs a new DirectoryRoleTemplateRequest. /// </summary> /// <param name="requestUrl">The URL for the built request.</param> /// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param> /// <param name="options">Query and header option name value pairs for the request.</param> public DirectoryRoleTemplateRequest( string requestUrl, IBaseClient client, IEnumerable<Option> options) : base(requestUrl, client, options) { } /// <summary> /// Creates the specified DirectoryRoleTemplate using POST. /// </summary> /// <param name="directoryRoleTemplateToCreate">The DirectoryRoleTemplate to create.</param> /// <returns>The created DirectoryRoleTemplate.</returns> public System.Threading.Tasks.Task<DirectoryRoleTemplate> CreateAsync(DirectoryRoleTemplate directoryRoleTemplateToCreate) { return this.CreateAsync(directoryRoleTemplateToCreate, CancellationToken.None); } /// <summary> /// Creates the specified DirectoryRoleTemplate using POST. /// </summary> /// <param name="directoryRoleTemplateToCreate">The DirectoryRoleTemplate to create.</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param> /// <returns>The created DirectoryRoleTemplate.</returns> public async System.Threading.Tasks.Task<DirectoryRoleTemplate> CreateAsync(DirectoryRoleTemplate directoryRoleTemplateToCreate, CancellationToken cancellationToken) { this.ContentType = "application/json"; this.Method = "POST"; var newEntity = await this.SendAsync<DirectoryRoleTemplate>(directoryRoleTemplateToCreate, cancellationToken).ConfigureAwait(false); this.InitializeCollectionProperties(newEntity); return newEntity; } /// <summary> /// Deletes the specified DirectoryRoleTemplate. /// </summary> /// <returns>The task to await.</returns> public System.Threading.Tasks.Task DeleteAsync() { return this.DeleteAsync(CancellationToken.None); } /// <summary> /// Deletes the specified DirectoryRoleTemplate. /// </summary> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param> /// <returns>The task to await.</returns> public async System.Threading.Tasks.Task DeleteAsync(CancellationToken cancellationToken) { this.Method = "DELETE"; await this.SendAsync<DirectoryRoleTemplate>(null, cancellationToken).ConfigureAwait(false); } /// <summary> /// Gets the specified DirectoryRoleTemplate. /// </summary> /// <returns>The DirectoryRoleTemplate.</returns> public System.Threading.Tasks.Task<DirectoryRoleTemplate> GetAsync() { return this.GetAsync(CancellationToken.None); } /// <summary> /// Gets the specified DirectoryRoleTemplate. 
/// </summary> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param> /// <returns>The DirectoryRoleTemplate.</returns> public async System.Threading.Tasks.Task<DirectoryRoleTemplate> GetAsync(CancellationToken cancellationToken) { this.Method = "GET"; var retrievedEntity = await this.SendAsync<DirectoryRoleTemplate>(null, cancellationToken).ConfigureAwait(false); this.InitializeCollectionProperties(retrievedEntity); return retrievedEntity; } /// <summary> /// Updates the specified DirectoryRoleTemplate using PATCH. /// </summary> /// <param name="directoryRoleTemplateToUpdate">The DirectoryRoleTemplate to update.</param> /// <returns>The updated DirectoryRoleTemplate.</returns> public System.Threading.Tasks.Task<DirectoryRoleTemplate> UpdateAsync(DirectoryRoleTemplate directoryRoleTemplateToUpdate) { return this.UpdateAsync(directoryRoleTemplateToUpdate, CancellationToken.None); } /// <summary> /// Updates the specified DirectoryRoleTemplate using PATCH. /// </summary> /// <param name="directoryRoleTemplateToUpdate">The DirectoryRoleTemplate to update.</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param> /// <returns>The updated DirectoryRoleTemplate.</returns> public async System.Threading.Tasks.Task<DirectoryRoleTemplate> UpdateAsync(DirectoryRoleTemplate directoryRoleTemplateToUpdate, CancellationToken cancellationToken) { this.ContentType = "application/json"; this.Method = "PATCH"; var updatedEntity = await this.SendAsync<DirectoryRoleTemplate>(directoryRoleTemplateToUpdate, cancellationToken).ConfigureAwait(false); this.InitializeCollectionProperties(updatedEntity); return updatedEntity; } /// <summary> /// Adds the specified expand value to the request. /// </summary> /// <param name="value">The expand value.</param> /// <returns>The request object to send.</returns> public IDirectoryRoleTemplateRequest Expand(string value) { this.QueryOptions.Add(new QueryOption("$expand", value)); return this; } /// <summary> /// Adds the specified expand value to the request. /// </summary> /// <param name="expandExpression">The expression from which to calculate the expand value.</param> /// <returns>The request object to send.</returns> public IDirectoryRoleTemplateRequest Expand(Expression<Func<DirectoryRoleTemplate, object>> expandExpression) { if (expandExpression == null) { throw new ArgumentNullException(nameof(expandExpression)); } string error; string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error); if (value == null) { throw new ArgumentException(error, nameof(expandExpression)); } else { this.QueryOptions.Add(new QueryOption("$expand", value)); } return this; } /// <summary> /// Adds the specified select value to the request. /// </summary> /// <param name="value">The select value.</param> /// <returns>The request object to send.</returns> public IDirectoryRoleTemplateRequest Select(string value) { this.QueryOptions.Add(new QueryOption("$select", value)); return this; } /// <summary> /// Adds the specified select value to the request. 
/// </summary> /// <param name="selectExpression">The expression from which to calculate the select value.</param> /// <returns>The request object to send.</returns> public IDirectoryRoleTemplateRequest Select(Expression<Func<DirectoryRoleTemplate, object>> selectExpression) { if (selectExpression == null) { throw new ArgumentNullException(nameof(selectExpression)); } string error; string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error); if (value == null) { throw new ArgumentException(error, nameof(selectExpression)); } else { this.QueryOptions.Add(new QueryOption("$select", value)); } return this; } /// <summary> /// Initializes any collection properties after deserialization, like next requests for paging. /// </summary> /// <param name="directoryRoleTemplateToInitialize">The <see cref="DirectoryRoleTemplate"/> with the collection properties to initialize.</param> private void InitializeCollectionProperties(DirectoryRoleTemplate directoryRoleTemplateToInitialize) { } } }
// ******************************************************************************************************** // Product Name: DotSpatial.Data.dll // Description: The data access libraries for the DotSpatial project. // ******************************************************************************************************** // // The Original Code is from MapWindow.dll version 6.0 // // The Initial Developer of this Original Code is Ted Dunsford. Created 4/29/2009 9:20:13 AM // // Contributor(s): (Open source contributors should list themselves and their modifications here). // // ******************************************************************************************************** using System.Collections.Generic; namespace DotSpatial.Data { /// <summary> /// A named list preserves a 1:1 mapping between names and items. It can be used to /// reference information in either direction. It essentially provides a string /// handle for working with generic typed ILists. This cannot instantiate new /// items. (Creating a default T would not work, for instance, for an interface). /// </summary> public class NamedList<T> : INamedList { #region Private Variables private string _baseName; private Dictionary<string, T> _items; // search by name private IList<T> _list; // determines order private Dictionary<T, string> _names; // search by item #endregion #region Constructors /// <summary> /// Creates a new instance of NamedList /// </summary> public NamedList() { _list = new List<T>(); // default setting _items = new Dictionary<string, T>(); _names = new Dictionary<T, string>(); } /// <summary> /// Creates a new instance of a named list. /// </summary> /// <param name="values">The values to use for the content.</param> public NamedList(IList<T> values) { _list = values; _items = new Dictionary<string, T>(); _names = new Dictionary<T, string>(); RefreshNames(); } /// <summary> /// Creates a new instance of a named list. /// </summary> /// <param name="values">The values to use for the content.</param> /// <param name="baseName">The string that should preceed the numbering to describe the individual items.</param> public NamedList(IList<T> values, string baseName) { _list = values; _items = new Dictionary<string, T>(); _names = new Dictionary<T, string>(); _baseName = baseName; RefreshNames(); } #endregion #region Methods /// <summary> /// Gets or sets the item corresponding to the specified name. Setting this /// will re-use the same name and position in the list, but set a new object. /// </summary> /// <param name="name">The string name of the item to obtain</param> /// <returns>The item of type T corresponding to the specified name</returns> public T this[string name] { get { if (_items.ContainsKey(name)) return _items[name]; return default(T); } set { T oldItem = _items[name]; _names.Remove(oldItem); int index = _list.IndexOf(oldItem); _list.RemoveAt(index); _list.Insert(index, value); _items[name] = value; _names.Add(value, name); } } /// <summary> /// Re-orders the list so that the index of the specifeid item is lower, /// and threfore will be drawn earlier, and therefore should appear /// in a lower position on the list. 
/// </summary> /// <param name="name"></param> public void Demote(string name) { Demote(_items[name]); } /// <summary> /// Gets the name of the item corresponding /// </summary> /// <param name="value">The item cast as an object.</param> /// <returns>The string name of the specified object, or null if the cast fails.</returns> public string GetNameOfObject(object value) { T item = Global.SafeCastTo<T>(value); if (item == null) return null; return GetName(item); } /// <summary> /// Gets the item with the specified name as an object. /// This enables the INamedList to work with items even /// if it doesn't know the strong type. /// </summary> /// <param name="name">The string name of the item to retrieve</param> /// <returns>The actual item cast as an object.</returns> public object GetItem(string name) { return this[name]; } /// <summary> /// Gets the list of names for the items currently stored in the list, /// in the sequence defined by the list of items. /// </summary> public string[] GetNames() { List<string> result = new List<string>(); foreach (T item in _list) { if (_names.ContainsKey(item) == false) { RefreshNames(); break; } } foreach (T item in _list) { result.Add(_names[item]); } return result.ToArray(); } /// <summary> /// Re-orders the list so that the index of the specified item is higher, /// and therefore will be drawn later, and therefore should appear /// in a higher position on the list. /// </summary> /// <param name="name"></param> public void Promote(string name) { Promote(_items[name]); } /// <summary> /// Updates the names to match the current set of actual items. /// </summary> public void RefreshNames() { // When re-ordering, we want to keep the name like category 0 the same, // so we can't just clear the values. Instead, to see the item move, // the name has to stay with the item. List<T> deleteItems = new List<T>(); foreach (T item in _names.Keys) { if (_list.Contains(item) == false) { deleteItems.Add(item); } } foreach (T item in deleteItems) { _names.Remove(item); } List<string> deleteNames = new List<string>(); foreach (string name in _items.Keys) { if (_names.ContainsValue(name) == false) { deleteNames.Add(name); } } foreach (string name in deleteNames) { _items.Remove(name); } foreach (T item in _list) { if (_names.ContainsKey(item) == false) { string name = BaseName + 0; int i = 1; while (_items.ContainsKey(name)) { name = BaseName + i; i++; } _names.Add(item, name); _items.Add(name, item); } } } /// <summary> /// Removes the item with the specified name from the list. /// </summary> /// <param name="name">The string name of the item to remove</param> public void Remove(string name) { Remove(_items[name]); } /// <summary> /// Re-orders the list so that this item appears closer to the 0 index. /// </summary> /// <param name="item"></param> public void Demote(T item) { int index = _list.IndexOf(item); if (index == -1) return; if (index == 0) return; _list.RemoveAt(index); _list.Insert(index - 1, item); } /// <summary> /// Gets the string name for the specified item /// </summary> /// <param name="item">The item of type T to find the name for</param> /// <returns>The string name corresponding to the specified item.</returns> public string GetName(T item) { if (_names.ContainsKey(item)) return _names[item]; return null; } /// <summary> /// Re-orders the list so that the index of the specified item is higher, /// and therefore will be drawn later, and therefore should appear /// in a higher position on the list. 
/// </summary> /// <param name="item"></param> public void Promote(T item) { int index = _list.IndexOf(item); if (index == -1) return; if (index == _list.Count - 1) return; _list.RemoveAt(index); _list.Insert(index + 1, item); } /// <summary> /// Removes the specified item /// </summary> /// <param name="item">The item to remove.</param> public void Remove(T item) { _list.Remove(item); RefreshNames(); } #endregion #region Properties /// <summary> /// Gets the list of actual items. This is basically a reference copy of /// the actual collection of items to be contained in this named list. /// </summary> public IList<T> Items { get { return _list; } set { _list = value; } } /// <summary> /// Gets or sets the base name to use for naming items /// </summary> public string BaseName { get { return _baseName; } set { _baseName = value; } } /// <summary> /// Gets the count of the items in the list. /// </summary> public int Count { get { return _items.Count; } } #endregion } }
/* * MindTouch Dream - a distributed REST framework * Copyright (C) 2006-2014 MindTouch, Inc. * www.mindtouch.com [email protected] * * For community documentation and downloads visit mindtouch.com; * please review the licensing section. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System.Collections.Generic; using System.Net; using System.Net.Mail; using System.Net.Mime; using System.Text; using Autofac; using log4net; using MindTouch.Tasking; using MindTouch.Xml; namespace MindTouch.Dream.Services { using Yield = IEnumerator<IYield>; [DreamService("MindTouch Email Sender", "Copyright (c) 2006-2014 MindTouch, Inc.", SID = new[] { "sid://mindtouch.com/2009/01/dream/email" } )] [DreamServiceConfig("smtp-host", "hostname", "")] [DreamServiceConfig("smtp-port", "port", "")] [DreamServiceConfig("smtp-auth-user", "username", "")] [DreamServiceConfig("smtp-auth-password", "username", "")] [DreamServiceConfig("use-ssl", "bool", "")] internal class EmailService : DreamService { //--- Types --- //--- Class Fields --- private static readonly ILog _log = LogUtils.CreateLog(); //--- Fields --- private string _emailApikey; private readonly Dictionary<string, SmtpSettings> _smtpSettings = new Dictionary<string, SmtpSettings>(); private SmtpSettings _defaultSettings; private ISmtpClientFactory _clientFactory; //--- Features --- [DreamFeature("POST:message", "Send an email")] internal Yield SendEmail(DreamContext context, DreamMessage request, Result<DreamMessage> response) { var mailDoc = request.ToDocument(); var mailMsg = new MailMessage(); foreach(XDoc to in mailDoc["to"]) { var email = to.AsText; if(string.IsNullOrEmpty(email)) { continue; } _log.DebugFormat("Adding TO address '{0}'", email); mailMsg.To.Add(GetEmailAddress(email)); } if(mailMsg.To.Count == 0) { throw new DreamBadRequestException("message does not contains any TO email addresses"); } var from = mailDoc["from"].AsText; _log.DebugFormat("from address: {0}", from); mailMsg.From = GetEmailAddress(from); mailMsg.Subject = mailDoc["subject"].AsText; string plaintextBody = null; foreach(var body in mailDoc["body"]) { AlternateView view; if(body["@html"].AsBool ?? 
false) { _log.Debug("adding html body"); view = AlternateView.CreateAlternateViewFromString(body.Contents, Encoding.UTF8, "text/html"); view.TransferEncoding = TransferEncoding.Base64; mailMsg.AlternateViews.Add(view); } else { plaintextBody = body.Contents; } } if(!string.IsNullOrEmpty(plaintextBody)) { _log.Debug("adding plain text body"); mailMsg.Body = plaintextBody; } foreach(var header in mailDoc["headers/header"]) { var name = header["name"].AsText; var value = header["value"].AsText; _log.DebugFormat("adding header '{0}': {1}", name, value); mailMsg.Headers.Add(name, value); } GetClient(mailDoc["@configuration"].AsText).Send(mailMsg); response.Return(DreamMessage.Ok()); yield break; } private MailAddress GetEmailAddress(string addressString) { var address = new MailAddress(addressString); if(string.IsNullOrEmpty(address.DisplayName)) { address = new MailAddress(addressString, addressString); } return address; } [DreamFeature("PUT:configuration/{configuration}", "Set smtp settings for a named configuration")] internal Yield ConfigureSmtp(DreamContext context, DreamMessage request, Result<DreamMessage> response) { var configuration = context.GetParam("configuration"); var settingsDoc = request.ToDocument(); _log.DebugFormat("configuring settings for configuration '{0}'", configuration); var host = settingsDoc["smtp-host"].AsText; var apikey = settingsDoc["apikey"].AsText; if(string.IsNullOrEmpty(host) && string.IsNullOrEmpty(apikey)) { response.Return(DreamMessage.BadRequest("must specify either new smtp config with a host or specify an apikey")); yield break; } SmtpSettings settings; if(string.IsNullOrEmpty(host)) { settings = new SmtpSettings() { Host = _defaultSettings.Host, Apikey = apikey, AuthPassword = _defaultSettings.AuthPassword, AuthUser = _defaultSettings.AuthUser, EnableSsl = _defaultSettings.EnableSsl, Port = _defaultSettings.Port, }; } else { _log.DebugFormat("Smtp Host: {0}", host); settings = new SmtpSettings { Host = host, AuthUser = settingsDoc["smtp-auth-user"].AsText, AuthPassword = settingsDoc["smtp-auth-password"].AsText, Apikey = apikey, // Note (arnec): ssl requires mono 2.0 and likely root certificate import via 'mozroots --import --ask-remove --machine' EnableSsl = settingsDoc["use-ssl"].AsBool ?? 
false }; if(settingsDoc["smtp-port"].AsInt.HasValue) { settings.Port = settingsDoc["smtp-port"].AsInt.Value; } } lock(_smtpSettings) { _smtpSettings[configuration] = settings; } response.Return(DreamMessage.Ok()); yield break; } [DreamFeature("GET:configuration/{configuration}", "Get smtp settings for a named configuration (minus password)")] internal Yield InspectSmtp(DreamContext context, DreamMessage request, Result<DreamMessage> response) { string configuration = context.GetParam("configuration"); SmtpSettings settings; DreamMessage msg; lock(_smtpSettings) { if(_smtpSettings.TryGetValue(configuration, out settings)) { msg = DreamMessage.Ok(new XDoc("smtp") .Elem("smtp-host", settings.Host) .Elem("smtp-port", settings.Port) .Elem("use-ssl", settings.EnableSsl) .Elem("smtp-auth-user", settings.AuthUser)); } else { msg = DreamMessage.NotFound("No such configuration"); } } response.Return(msg); yield break; } [DreamFeature("DELETE:configuration/{configuration}", "Set smtp settings for a specific wiki")] internal Yield DeleteSmtpSettings(DreamContext context, DreamMessage request, Result<DreamMessage> response) { string configuration = context.GetParam("configuration"); _log.DebugFormat("removing settings for configuration '{0}'", configuration); lock(_smtpSettings) { _smtpSettings.Remove(configuration); } response.Return(DreamMessage.Ok()); yield break; } //--- Methods --- protected override Yield Start(XDoc config, ILifetimeScope container, Result result) { yield return Coroutine.Invoke(base.Start, config, new Result()); _defaultSettings = new SmtpSettings { Host = config["smtp-host"].AsText }; if(string.IsNullOrEmpty(_defaultSettings.Host)) { _defaultSettings.Host = "localhost"; } _log.DebugFormat("Smtp Host: {0}", _defaultSettings.Host); // Note (arnec): ssl requires mono 2.0 and likely root certificate import via 'mozroots --import --ask-remove --machine' _defaultSettings.EnableSsl = config["use-ssl"].AsBool ?? false; if(config["smtp-port"].AsInt.HasValue) { _defaultSettings.Port = config["smtp-port"].AsInt.Value; } _defaultSettings.AuthUser = config["smtp-auth-user"].AsText; _defaultSettings.AuthPassword = config["smtp-auth-password"].AsText; _clientFactory = container.IsRegistered<ISmtpClientFactory>() ? 
container.Resolve<ISmtpClientFactory>() : new SmtpClientFactory(); // get an apikey for accessing the services without it's private/internal keys _emailApikey = config["apikey"].AsText; result.Return(); } protected override Yield Stop(Result result) { _smtpSettings.Clear(); _defaultSettings = null; yield return Coroutine.Invoke(base.Stop, new Result()); result.Return(); } private ISmtpClient GetClient(string configuration) { _log.DebugFormat("Getting smtp settings for configuration '{0}'", configuration); SmtpSettings settings; lock(_smtpSettings) { if(!_smtpSettings.TryGetValue(configuration, out settings)) { _log.DebugFormat("Using default settings"); settings = _defaultSettings; } } return _clientFactory.CreateClient(settings); } protected override DreamAccess DetermineAccess(DreamContext context, string key) { if(!string.IsNullOrEmpty(key)) { // Grant internal access for proper apikey if(!string.IsNullOrEmpty(_emailApikey) && _emailApikey == key) { return DreamAccess.Internal; } // Check whether we can test an apikey from the targeted configuration var configuration = context.GetParam("configuration", null); if(string.IsNullOrEmpty(configuration) && context.Request.HasDocument) { configuration = context.Request.ToDocument()["@configuration"].AsText; } if(!string.IsNullOrEmpty(configuration)) { SmtpSettings settings; lock(_smtpSettings) { _smtpSettings.TryGetValue(configuration, out settings); } if(settings != null && !string.IsNullOrEmpty(settings.Apikey) && settings.Apikey == key) { return DreamAccess.Internal; } } } return base.DetermineAccess(context, key); } } /// <summary> /// Settings container for clients created with <see cref="ISmtpClientFactory"/>. /// </summary> public class SmtpSettings { //--- Fields --- /// <summary> /// Smtp Host /// </summary> public string Host; /// <summary> /// Optional Smtp Port; /// </summary> public int? Port; /// <summary> /// Optional Authentication User /// </summary> public string AuthUser; /// <summary> /// Optional Authentication Password /// </summary> public string AuthPassword; /// <summary> /// Try to use secure connection? /// </summary> public bool EnableSsl; /// <summary> /// Apikey that will be provided to authorize the use of these settings. /// </summary> public string Apikey; } /// <summary> /// Implementation of <see cref="ISmtpClientFactory"/> /// </summary> public class SmtpClientFactory : ISmtpClientFactory { //--- Class Fields --- private static readonly ILog _log = LogUtils.CreateLog(); //--- Methods --- /// <summary> /// Create a new <see cref="ISmtpClient"/>. /// </summary> /// <param name="settings">Client settings.</param> /// <returns>New <see cref="ISmtpClient"/> instance</returns> public ISmtpClient CreateClient(SmtpSettings settings) { var client = new SmtpClient { Host = settings.Host, EnableSsl = settings.EnableSsl }; _log.DebugFormat("SSL enabled: {0}", client.EnableSsl); if(settings.Port.HasValue) { client.Port = settings.Port.Value; _log.DebugFormat("using custom port: {0}", client.Port); } if(!string.IsNullOrEmpty(settings.AuthUser)) { _log.DebugFormat("using authentication user: {0}", settings.AuthUser); var credentials = new NetworkCredential(settings.AuthUser, settings.AuthPassword); client.Credentials = credentials; } return new SmtpClientWrapper(client); } } /// <summary> /// Factory for creating <see cref="ISmtpClient"/> instances. /// </summary> public interface ISmtpClientFactory { //--- Methods --- /// <summary> /// Create a new <see cref="ISmtpClient"/>. 
/// </summary> /// <param name="settings">Client settings.</param> /// <returns>New <see cref="ISmtpClient"/> instance</returns> ISmtpClient CreateClient(SmtpSettings settings); } /// <summary> /// Implemenation of <see cref="ISmtpClient"/> wrapping the standard <see cref="SmtpClient"/>. /// </summary> public class SmtpClientWrapper : ISmtpClient { //--- Fields --- private readonly SmtpClient _client; //--- Constructors --- /// <summary> /// Create a new <see cref="SmtpClient"/> wrapper. /// </summary> /// <param name="client"></param> public SmtpClientWrapper(SmtpClient client) { _client = client; } //--- ISmtpClient Members --- void ISmtpClient.Send(MailMessage message) { _client.Send(message); } } /// <summary> /// Simple Smtp client interface /// </summary> public interface ISmtpClient { //--- Methods --- /// <summary> /// Send a mail message. /// </summary> /// <param name="message">Message to send.</param> void Send(MailMessage message); } }
/* * [The "BSD licence"] * Copyright (c) 2005-2008 Terence Parr * All rights reserved. * * Conversion to C#: * Copyright (c) 2008-2009 Sam Harwell, Pixel Mine, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ namespace Antlr.Runtime.Tree { using ArgumentNullException = System.ArgumentNullException; using CLSCompliant = System.CLSCompliantAttribute; /** <summary> * A tree node that is wrapper for a Token object. After 3.0 release * while building tree rewrite stuff, it became clear that computing * parent and child index is very difficult and cumbersome. Better to * spend the space in every tree node. If you don't want these extra * fields, it's easy to cut them out in your own BaseTree subclass. * </summary> */ [System.Serializable] public class CommonTree : BaseTree { /** <summary>A single token is the payload</summary> */ [CLSCompliant(false)] public IToken token; /** <summary> * What token indexes bracket all tokens associated with this node * and below? * </summary> */ protected int startIndex = -1; protected int stopIndex = -1; /** <summary>Who is the parent node of this node; if null, implies node is root</summary> */ CommonTree parent; /** <summary>What index is this node in the child list? 
Range: 0..n-1</summary> */ int childIndex = -1; public CommonTree() { } public CommonTree(CommonTree node) : base(node) { if (node == null) throw new ArgumentNullException("node"); this.token = node.token; this.startIndex = node.startIndex; this.stopIndex = node.stopIndex; } public CommonTree(IToken t) { this.token = t; } #region Properties public override int CharPositionInLine { get { if (token == null || token.CharPositionInLine == -1) { if (ChildCount > 0) { return Children[0].CharPositionInLine; } return 0; } return token.CharPositionInLine; } set { base.CharPositionInLine = value; } } public override int ChildIndex { get { return childIndex; } set { childIndex = value; } } public override bool IsNil { get { return token == null; } } public override int Line { get { if (token == null || token.Line == 0) { if (ChildCount > 0) { return Children[0].Line; } return 0; } return token.Line; } set { base.Line = value; } } public override ITree Parent { get { return parent; } set { parent = (CommonTree)value; } } public override string Text { get { if (token == null) return null; return token.Text; } set { } } public virtual IToken Token { get { return token; } set { token = value; } } public override int TokenStartIndex { get { if (startIndex == -1 && token != null) { return token.TokenIndex; } return startIndex; } set { startIndex = value; } } public override int TokenStopIndex { get { if (stopIndex == -1 && token != null) { return token.TokenIndex; } return stopIndex; } set { stopIndex = value; } } public override int Type { get { if (token == null) return TokenTypes.Invalid; return token.Type; } set { } } #endregion public override ITree DupNode() { return new CommonTree(this); } /** <summary> * For every node in this subtree, make sure it's start/stop token's * are set. Walk depth first, visit bottom up. Only updates nodes * with at least one token index &lt; 0. * </summary> */ public virtual void SetUnknownTokenBoundaries() { if (Children == null) { if (startIndex < 0 || stopIndex < 0) { startIndex = stopIndex = token.TokenIndex; } return; } for (int i = 0; i < Children.Count; i++) { ((CommonTree)Children[i]).SetUnknownTokenBoundaries(); } if (startIndex >= 0 && stopIndex >= 0) return; // already set if (Children.Count > 0) { CommonTree firstChild = (CommonTree)Children[0]; CommonTree lastChild = (CommonTree)Children[Children.Count - 1]; startIndex = firstChild.TokenStartIndex; stopIndex = lastChild.TokenStopIndex; } } public override string ToString() { if (IsNil) { return "nil"; } if (Type == TokenTypes.Invalid) { return "<errornode>"; } if (token == null) { return string.Empty; } return token.Text; } } }
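// --- Small usage sketch for CommonTree (illustrative only) ---
// CommonToken comes from the same Antlr.Runtime assembly and is assumed here;
// user token types are chosen above the reserved DOWN/UP range.
using System;
using Antlr.Runtime;
using Antlr.Runtime.Tree;

public static class CommonTreeExample
{
    public static void Main()
    {
        const int PLUS = 4, INT = 5;

        var root = new CommonTree(new CommonToken(PLUS, "+"));
        root.AddChild(new CommonTree(new CommonToken(INT, "3")));
        root.AddChild(new CommonTree(new CommonToken(INT, "4")));

        Console.WriteLine(root.Text);           // +
        Console.WriteLine(root.ChildCount);     // 2
        Console.WriteLine(root.ToStringTree()); // (+ 3 4)

        var nil = new CommonTree();             // no token => nil (list) node
        Console.WriteLine(nil.IsNil);           // True
    }
}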
using System; using System.Collections.Generic; using System.Diagnostics; using System.Runtime.InteropServices; using System.Linq; namespace Python.Runtime { /// <summary> /// Common base class for all objects that are implemented in managed /// code. It defines the common fields that associate CLR and Python /// objects and common utilities to convert between those identities. /// </summary> [Serializable] internal abstract class ManagedType { internal enum TrackTypes { Untrack, Extension, Wrapper, } [NonSerialized] internal GCHandle gcHandle; // Native handle internal IntPtr pyHandle; // PyObject * internal IntPtr tpHandle; // PyType * internal BorrowedReference ObjectReference => new BorrowedReference(pyHandle); private static readonly Dictionary<ManagedType, TrackTypes> _managedObjs = new Dictionary<ManagedType, TrackTypes>(); internal void IncrRefCount() { Runtime.XIncref(pyHandle); } internal void DecrRefCount() { Runtime.XDecref(pyHandle); } internal long RefCount { get { var gs = Runtime.PyGILState_Ensure(); try { return Runtime.Refcount(pyHandle); } finally { Runtime.PyGILState_Release(gs); } } } internal GCHandle AllocGCHandle(TrackTypes track = TrackTypes.Untrack) { gcHandle = GCHandle.Alloc(this); if (track != TrackTypes.Untrack && PythonEngine.ShutdownMode == ShutdownMode.Reload) { _managedObjs.Add(this, track); } return gcHandle; } internal void FreeGCHandle() { if (PythonEngine.ShutdownMode == ShutdownMode.Reload) { _managedObjs.Remove(this); } if (gcHandle.IsAllocated) { gcHandle.Free(); gcHandle = default; } } internal static object GetManagedObject(BorrowedReference ob) => GetManagedObject(ob.DangerousGetAddress()); /// <summary> /// Given a Python object, return the associated managed object or null. /// </summary> internal static object GetManagedObject(IntPtr ob) { if (ob != IntPtr.Zero) { IntPtr tp = Runtime.PyObject_TYPE(ob); if (tp == Runtime.PyTypeType || tp == Runtime.PyCLRMetaType) { tp = ob; } var flags = Util.ReadCLong(tp, TypeOffset.tp_flags); if ((flags & TypeFlags.Managed) != 0) { IntPtr op = tp == ob ? 
Marshal.ReadIntPtr(tp, TypeOffset.magic()) : Marshal.ReadIntPtr(ob, ObjectOffset.magic(tp)); if (op == IntPtr.Zero) { return null; } return GCHandle.FromIntPtr(op).Target; } } return null; } internal static ManagedType GetManagedObjectErr(IntPtr ob) { var result = (ManagedType)GetManagedObject(ob); if (result == null) { Exceptions.SetError(Exceptions.TypeError, "invalid argument, expected CLR type"); } return result; } internal static bool IsManagedType(BorrowedReference ob) => IsManagedType(ob.DangerousGetAddressOrNull()); internal static bool IsManagedType(IntPtr ob) { if (ob != IntPtr.Zero) { IntPtr tp = Runtime.PyObject_TYPE(ob); if (tp == Runtime.PyTypeType || tp == Runtime.PyCLRMetaType) { tp = ob; } var flags = Util.ReadCLong(tp, TypeOffset.tp_flags); if ((flags & TypeFlags.Managed) != 0) { return true; } } return false; } public bool IsTypeObject() { return pyHandle == tpHandle; } internal static IDictionary<ManagedType, TrackTypes> GetManagedObjects() { return _managedObjs; } internal static void ClearTrackedObjects() { _managedObjs.Clear(); } internal static int PyVisit(IntPtr ob, IntPtr visit, IntPtr arg) { if (ob == IntPtr.Zero) { return 0; } var visitFunc = NativeCall.GetDelegate<Interop.ObjObjFunc>(visit); return visitFunc(ob, arg); } /// <summary> /// Wrapper for calling tp_clear /// </summary> internal void CallTypeClear() { if (tpHandle == IntPtr.Zero || pyHandle == IntPtr.Zero) { return; } var clearPtr = Marshal.ReadIntPtr(tpHandle, TypeOffset.tp_clear); if (clearPtr == IntPtr.Zero) { return; } var clearFunc = NativeCall.GetDelegate<Interop.InquiryFunc>(clearPtr); clearFunc(pyHandle); } /// <summary> /// Wrapper for calling tp_traverse /// </summary> internal void CallTypeTraverse(Interop.ObjObjFunc visitproc, IntPtr arg) { if (tpHandle == IntPtr.Zero || pyHandle == IntPtr.Zero) { return; } var traversePtr = Marshal.ReadIntPtr(tpHandle, TypeOffset.tp_traverse); if (traversePtr == IntPtr.Zero) { return; } var traverseFunc = NativeCall.GetDelegate<Interop.ObjObjArgFunc>(traversePtr); var visiPtr = Marshal.GetFunctionPointerForDelegate(visitproc); traverseFunc(pyHandle, visiPtr, arg); } protected void TypeClear() { ClearObjectDict(pyHandle); } internal void Save(InterDomainContext context) { OnSave(context); } internal void Load(InterDomainContext context) { OnLoad(context); } protected virtual void OnSave(InterDomainContext context) { } protected virtual void OnLoad(InterDomainContext context) { } protected static void ClearObjectDict(IntPtr ob) { IntPtr dict = GetObjectDict(ob); if (dict == IntPtr.Zero) { return; } SetObjectDict(ob, IntPtr.Zero); Runtime.XDecref(dict); } protected static IntPtr GetObjectDict(IntPtr ob) { IntPtr type = Runtime.PyObject_TYPE(ob); return Marshal.ReadIntPtr(ob, ObjectOffset.TypeDictOffset(type)); } protected static void SetObjectDict(IntPtr ob, IntPtr value) { IntPtr type = Runtime.PyObject_TYPE(ob); Marshal.WriteIntPtr(ob, ObjectOffset.TypeDictOffset(type), value); } } }
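// --- Hedged sketch of how the helpers above are typically consumed ---
// A caller elsewhere in Python.Runtime receiving a raw PyObject* (for example
// from a tp_* slot) can recover the wrapped CLR instance; the calling context
// is an assumption, only ManagedType members defined above are used.
using System;

namespace Python.Runtime
{
    internal static class ManagedTypeUsageSketch
    {
        internal static object TryGetClrInstance(IntPtr op)
        {
            // Only objects whose type carries TypeFlags.Managed hold a GCHandle
            // at the "magic" offset; anything else yields null.
            if (!ManagedType.IsManagedType(op))
            {
                return null;
            }
            return ManagedType.GetManagedObject(op);
        }
    }
}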
//============================================================================= // System : Sandcastle Help File Builder Utilities // File : ProjectElement.cs // Author : Eric Woodruff ([email protected]) // Updated : 04/20/2009 // Note : Copyright 2008-2009, Eric Woodruff, All rights reserved // Compiler: Microsoft Visual C# // // This file contains a wrapper class for build items in the project. // // This code is published under the Microsoft Public License (Ms-PL). A copy // of the license should be distributed with the code. It can also be found // at the project website: http://SHFB.CodePlex.com. This notice, the // author's name, and all copyright notices must remain intact in all // applications, documentation, and source files. // // Version Date Who Comments // ============================================================================ // 1.8.0.0 06/23/2008 EFW Created the code //============================================================================= using System; using System.ComponentModel; using Microsoft.Build.Evaluation; namespace SandcastleBuilder.Utils { /// <summary> /// This class is a wrapper for build items in the project. /// </summary> public sealed class ProjectElement { #region Constants //===================================================================== /// <summary>Build action</summary> public const string BuildAction = "BuildAction"; /// <summary>Include item</summary> public const string IncludePath = "Include"; /// <summary>File reference hint path</summary> public const string HintPath = "HintPath"; /// <summary>Linked item path</summary> public const string LinkPath = "Link"; /// <summary>Project GUID item</summary> public const string ProjectGuid = "Project"; /// <summary>Project name item</summary> public const string Name = "Name"; /// <summary>Project COM object's GUID</summary> public const string Guid = "Guid"; /// <summary>Project COM object's major version</summary> public const string VersionMajor = "VersionMajor"; /// <summary>Project COM object's minor version</summary> public const string VersionMinor = "VersionMinor"; /// <summary>Project COM object's wrapper tool</summary> public const string WrapperTool = "WrapperTool"; /// <summary>Configuration setting</summary> public const string Configuration = "Configuration"; /// <summary>Platform setting</summary> public const string Platform = "Platform"; /// <summary>Output directory setting</summary> public const string OutDir = "OutDir"; /// <summary>Assembly name.</summary> public const string Assembly = "Assembly"; /// <summary>Image ID</summary> public const string ImageId = "ImageId"; /// <summary>Alternate text</summary> public const string AlternateText = "AlternateText"; /// <summary>Copy to media folder</summary> public const string CopyToMedia = "CopyToMedia"; /// <summary>Exclude from table of contents</summary> public const string ExcludeFromToc = "ExcludeFromToc"; /// <summary>Sort order</summary> public const string SortOrder = "SortOrder"; // Visual Studio solution macros /// <summary>Solution path (directory and filename)</summary> public const string SolutionPath = "SolutionPath"; /// <summary>Solution directory</summary> public const string SolutionDir = "SolutionDir"; /// <summary>Solution filename (no path)</summary> public const string SolutionFileName = "SolutionFileName"; /// <summary>Solution name (no path or extension)</summary> public const string SolutionName = "SolutionName"; /// <summary>Solution extension</summary> public const string SolutionExt = 
"SolutionExt"; #endregion #region Private data members //===================================================================== private SandcastleProject projectFile; private ProjectItem item; #endregion #region Properties //===================================================================== /// <summary> /// This is used to set or get the item name (a.k.a BuildAction) /// </summary> public string ItemName { get { return item.ItemType; } set { this.CheckProjectIsEditable(); item.ItemType = value; projectFile.MarkAsDirty(); } } /// <summary> /// This is used to set or get the filename (Include attribute) /// </summary> public string Include { get { return item.EvaluatedInclude; } set { if (item.EvaluatedInclude != value) { if(String.IsNullOrEmpty(value) || value.IndexOfAny(new char[] { '*', '?' }) != -1) throw new ArgumentException("The filename cannot be " + "blank and cannot contain wildcards (* or ?)"); // Folder items must end in a backslash if(item.ItemType == Utils.BuildAction.Folder.ToString() && value[value.Length - 1] != '\\') value += @"\"; this.CheckProjectIsEditable(); item.UnevaluatedInclude = value; projectFile.MarkAsDirty(); } } } /// <summary> /// This read-only property is used to get the containing project /// </summary> public SandcastleProject Project { get { return projectFile; } } #endregion #region Constructors //===================================================================== /// <summary> /// This constructor is used to wrap an existing project item. /// </summary> /// <param name="project">The project that owns the item</param> /// <param name="existingItem">The existing item</param> /// <overloads>There are two overloads for the constructor</overloads> internal ProjectElement(SandcastleProject project, ProjectItem existingItem) { if(project == null) throw new ArgumentNullException("project"); if(existingItem == null) throw new ArgumentNullException("existingItem"); projectFile = project; item = existingItem; } /// <summary> /// This constructor is used to create a new build item and add it to /// the project. /// </summary> /// <param name="project">The project that will own the item</param> /// <param name="itemType">The type of build item to create</param> /// <param name="itemPath">The path to the item. This can be relative /// or absolute and may contain variable references.</param> internal ProjectElement(SandcastleProject project, string itemType, string itemPath) { if(project == null) throw new ArgumentNullException("project"); if(String.IsNullOrEmpty(itemPath)) throw new ArgumentException("Cannot be null or empty", "itemPath"); if(String.IsNullOrEmpty(itemType)) throw new ArgumentException("Cannot be null or empty", "itemType"); projectFile = project; this.CheckProjectIsEditable(); if(itemType == Utils.BuildAction.Folder.ToString() && itemPath[itemPath.Length - 1] != '\\') itemPath += @"\"; item = project.MSBuildProject.AddItem(itemType, itemPath)[0]; projectFile.MarkAsDirty(); } #endregion #region Project item methods //===================================================================== /// <summary> /// This is used to see if the project can be edited. If not, abort /// the change by throwing an exception. 
/// </summary> private void CheckProjectIsEditable() { CancelEventArgs ce = new CancelEventArgs(); projectFile.OnQueryEditProjectFile(ce); if(ce.Cancel) throw new OperationCanceledException("Project cannot be edited"); } /// <summary> /// See if the named metadata item exists /// </summary> /// <param name="name">The metadata name for which to check</param> /// <returns>True if present, false if not</returns> public bool HasMetadata(string name) { // Build Action is the name, not metadata. // Include is an attribute, not metadata. if(String.Compare(name, ProjectElement.BuildAction, StringComparison.OrdinalIgnoreCase) == 0 || String.Compare(name, ProjectElement.IncludePath, StringComparison.OrdinalIgnoreCase) == 0) return true; return item.HasMetadata(name); } /// <summary> /// Get a metadata value from a project element /// </summary> /// <param name="name">The name of the metadata element to get</param> /// <returns>The value of the metadata element</returns> public string GetMetadata(string name) { // Build Action is the name, not metadata if(String.Compare(name, ProjectElement.BuildAction, StringComparison.OrdinalIgnoreCase) == 0) return item.ItemType; // Include is an attribute, not metadata if(String.Compare(name, ProjectElement.IncludePath, StringComparison.OrdinalIgnoreCase) == 0) return item.EvaluatedInclude; return item.GetMetadataValue(name); } /// <summary> /// Set a metadata value in the project item /// </summary> /// <param name="name">The name of the metadata element</param> /// <param name="value">The value to store in the element</param> public void SetMetadata(string name, string value) { this.CheckProjectIsEditable(); // Build Action is the name, not metadata if(String.Compare(name, ProjectElement.BuildAction, StringComparison.OrdinalIgnoreCase) == 0) { item.ItemType = value; return; } // Include is an attribute, not metadata if(String.Compare(name, ProjectElement.IncludePath, StringComparison.OrdinalIgnoreCase) == 0) { item.UnevaluatedInclude = value; return; } if(String.IsNullOrEmpty(value)) item.RemoveMetadata(name); else item.SetMetadataValue(name, value); projectFile.MarkAsDirty(); } /// <summary> /// Remove the item from the project /// </summary> public void RemoveFromProjectFile() { this.CheckProjectIsEditable(); projectFile.MSBuildProject.RemoveItem(item); projectFile = null; item = null; } #endregion #region Equality, ToString, etc. //===================================================================== /// <summary> /// Overload for equal operator /// </summary> /// <param name="element1">The first element object</param> /// <param name="element2">The second element object</param> /// <returns>True if equal, false if not.</returns> public static bool operator == (ProjectElement element1, ProjectElement element2) { // Do they reference the same element? if(Object.ReferenceEquals(element1, element2)) return true; // Check null reference first (cast to object first to avoid // stack overflow). if(element1 as object == null || element2 as object == null) return false; // Do they reference the same project? 
if(!element1.projectFile.Equals(element2.projectFile)) return false; return String.Equals(element1.GetMetadata(ProjectElement.IncludePath), element2.GetMetadata(ProjectElement.IncludePath), StringComparison.CurrentCultureIgnoreCase); } /// <summary> /// Overload for not equal operator /// </summary> /// <param name="element1">The first element object</param> /// <param name="element2">The second element object</param> /// <returns>True if not equal, false if they are equal.</returns> public static bool operator !=(ProjectElement element1, ProjectElement element2) { return !(element1 == element2); } /// <inheritdoc /> public override bool Equals(object obj) { ProjectElement element2 = obj as ProjectElement; if(element2 == null) return false; return this == element2; } /// <inheritdoc /> public override int GetHashCode() { return item.EvaluatedInclude.GetHashCode(); } #endregion } }
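// ---------------------------------------------------------------------------
// Hedged sketch (not part of the ProjectElement file above): it only
// illustrates why GetMetadata/SetMetadata special-case "BuildAction" and
// "Include" -- MSBuild stores those as the item type and the Include
// attribute, not as item metadata. The item values and metadata name below
// are hypothetical; the Microsoft.Build.Evaluation calls are the same ones
// the wrapper delegates to (AddItem, ItemType, EvaluatedInclude, HasMetadata,
// GetMetadataValue, SetMetadataValue).
// ---------------------------------------------------------------------------
using System;
using Microsoft.Build.Evaluation;

internal static class ProjectItemMetadataSketch
{
    private static void Main()
    {
        var project = new Project();                        // in-memory project
        ProjectItem item = project.AddItem("Content", @"Help\About.aml")[0];

        // Item type ("BuildAction") and Include are attributes of the item...
        Console.WriteLine(item.ItemType);                   // Content
        Console.WriteLine(item.EvaluatedInclude);           // Help\About.aml
        Console.WriteLine(item.HasMetadata("Include"));     // False - not metadata

        // ...while anything else is genuine item metadata.
        item.SetMetadataValue("ImageId", "AboutImage");     // hypothetical name
        Console.WriteLine(item.GetMetadataValue("ImageId")); // AboutImage
    }
}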
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Collections.Immutable; using System.Composition; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.CodeActions; using Microsoft.CodeAnalysis.CodeFixes; using Microsoft.CodeAnalysis.CodeFixes.Iterator; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Formatting; using Microsoft.CodeAnalysis.Shared.Extensions; using Microsoft.CodeAnalysis.Text; namespace Microsoft.CodeAnalysis.CSharp.CodeFixes.Iterator { [ExportCodeFixProvider(LanguageNames.CSharp, Name = PredefinedCodeFixProviderNames.ChangeToYield), Shared] internal class CSharpAddYieldCodeFixProvider : AbstractIteratorCodeFixProvider { /// <summary> /// CS0029: Cannot implicitly convert from type 'x' to 'y' /// </summary> private const string CS0029 = nameof(CS0029); /// <summary> /// CS0266: Cannot implicitly convert from type 'x' to 'y'. An explicit conversion exists (are you missing a cast?) /// </summary> private const string CS0266 = nameof(CS0266); public override ImmutableArray<string> FixableDiagnosticIds { get { return ImmutableArray.Create(CS0029, CS0266); } } protected override async Task<CodeAction> GetCodeFixAsync(SyntaxNode root, SyntaxNode node, Document document, Diagnostic diagnostics, CancellationToken cancellationToken) { // Check if node is return statement if (!node.IsKind(SyntaxKind.ReturnStatement)) { return null; } var returnStatement = node as ReturnStatementSyntax; var model = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); ITypeSymbol methodReturnType; if (!TryGetMethodReturnType(node, model, cancellationToken, out methodReturnType)) { return null; } ITypeSymbol returnExpressionType; if (!TryGetExpressionType(model, returnStatement.Expression, out returnExpressionType)) { return null; } var typeArguments = methodReturnType.GetAllTypeArguments(); var shouldOfferYieldReturn = typeArguments.Length != 1 ? 
IsCorrectTypeForYieldReturn(returnExpressionType, methodReturnType, model) : IsCorrectTypeForYieldReturn(typeArguments.Single(), returnExpressionType, methodReturnType, model); if (!shouldOfferYieldReturn) { return null; } var yieldStatement = SyntaxFactory.YieldStatement( SyntaxKind.YieldReturnStatement, returnStatement.Expression) .WithAdditionalAnnotations(Formatter.Annotation); root = root.ReplaceNode(returnStatement, yieldStatement); return new MyCodeAction(CSharpFeaturesResources.ChangeToYieldReturn, document.WithSyntaxRoot(root)); } private bool TryGetExpressionType(SemanticModel model, ExpressionSyntax expression, out ITypeSymbol returnExpressionType) { var info = model.GetTypeInfo(expression); returnExpressionType = info.Type; return returnExpressionType != null; } private bool TryGetMethodReturnType(SyntaxNode node, SemanticModel model, CancellationToken cancellationToken, out ITypeSymbol methodReturnType) { methodReturnType = null; var symbol = model.GetEnclosingSymbol(node.Span.Start, cancellationToken); var method = symbol as IMethodSymbol; if (method == null || method.ReturnsVoid) { return false; } methodReturnType = method.ReturnType; return methodReturnType != null; } private bool IsCorrectTypeForYieldReturn(ITypeSymbol typeArgument, ITypeSymbol returnExpressionType, ITypeSymbol methodReturnType, SemanticModel model) { var ienumerableSymbol = model.Compilation.GetTypeByMetadataName("System.Collections.IEnumerable"); var ienumeratorSymbol = model.Compilation.GetTypeByMetadataName("System.Collections.IEnumerator"); var ienumerableGenericSymbol = model.Compilation.GetTypeByMetadataName("System.Collections.Generic.IEnumerable`1"); var ienumeratorGenericSymbol = model.Compilation.GetTypeByMetadataName("System.Collections.Generic.IEnumerator`1"); if (ienumerableGenericSymbol == null || ienumerableSymbol == null || ienumeratorGenericSymbol == null || ienumeratorSymbol == null) { return false; } ienumerableGenericSymbol = ienumerableGenericSymbol.Construct(typeArgument); ienumeratorGenericSymbol = ienumeratorGenericSymbol.Construct(typeArgument); if (!CanConvertTypes(typeArgument, returnExpressionType, model)) { return false; } if (!(methodReturnType.Equals(ienumerableGenericSymbol) || methodReturnType.Equals(ienumerableSymbol) || methodReturnType.Equals(ienumeratorGenericSymbol) || methodReturnType.Equals(ienumeratorSymbol))) { return false; } return true; } private bool CanConvertTypes(ITypeSymbol typeArgument, ITypeSymbol returnExpressionType, SemanticModel model) { // return false if there is no conversion for the top level type if (!model.Compilation.ClassifyConversion(typeArgument, returnExpressionType).Exists) { return false; } // Classify conversion does not consider type parameters on its own so we will have to recurse through them var leftArguments = typeArgument.GetTypeArguments(); var rightArguments = returnExpressionType.GetTypeArguments(); // If we have a mismatch in the number of type arguments we can immediately return as there is no way the types are convertible if ((leftArguments != null && rightArguments != null) && leftArguments.Length != rightArguments.Length) { return false; } // If there are no more type arguments we assume they are convertible since the outer generic types are convertible if (leftArguments == null || !leftArguments.Any()) { return true; } // Check if all the type arguments are convertible for (int i = 0; i < leftArguments.Length; i++) { if (!CanConvertTypes(leftArguments[i], rightArguments[i], model)) { return false; } } // Type argument 
comparisons have all succeeded, return true return true; } private bool IsCorrectTypeForYieldReturn(ITypeSymbol returnExpressionType, ITypeSymbol methodReturnType, SemanticModel model) { var ienumerableSymbol = model.Compilation.GetTypeByMetadataName("System.Collections.IEnumerable"); var ienumeratorSymbol = model.Compilation.GetTypeByMetadataName("System.Collections.IEnumerator"); if (ienumerableSymbol == null || ienumeratorSymbol == null) { return false; } if (!(methodReturnType.Equals(ienumerableSymbol) || methodReturnType.Equals(ienumeratorSymbol))) { return false; } return true; } protected override bool TryGetNode(SyntaxNode root, TextSpan span, out SyntaxNode node) { node = null; var ancestors = root.FindToken(span.Start).GetAncestors<SyntaxNode>(); if (!ancestors.Any()) { return false; } node = ancestors.FirstOrDefault((n) => n.Span.Contains(span) && n != root && n.IsKind(SyntaxKind.ReturnStatement)); return node != null; } private class MyCodeAction : CodeAction.DocumentChangeAction { public MyCodeAction(string title, Document newDocument) : base(title, c => Task.FromResult(newDocument)) { } } } }
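// ---------------------------------------------------------------------------
// Hedged sketch (assumption: a standalone console snippet, not part of the
// code fix provider above): it reproduces only the core rewrite the provider
// performs -- replacing a ReturnStatementSyntax with a yield return statement
// built from the same expression -- using the public Roslyn syntax APIs.
// The sample source text is hypothetical.
// ---------------------------------------------------------------------------
using System;
using System.Linq;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;

internal static class YieldRewriteSketch
{
    private static void Main()
    {
        var tree = CSharpSyntaxTree.ParseText(@"
using System.Collections.Generic;
class C
{
    IEnumerable<int> M() { return 42; }   // would report CS0029
}");
        var root = tree.GetRoot();
        var ret = root.DescendantNodes().OfType<ReturnStatementSyntax>().Single();

        // Same construction the provider uses: keep the original expression
        // and wrap it in a yield return statement.
        var yieldReturn = SyntaxFactory.YieldStatement(
            SyntaxKind.YieldReturnStatement, ret.Expression);

        var newRoot = root.ReplaceNode(ret, yieldReturn).NormalizeWhitespace();
        Console.WriteLine(newRoot.ToFullString()); // M() now contains 'yield return 42;'
    }
}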
#region Header // // CmdRollingOffset.cs - calculate a rolling offset pipe segment between two existing pipes and hook them up // // Copyright (C) 2013-2021 by Jeremy Tammik, Autodesk Inc. All rights reserved. // // Keywords: The Building Coder Revit API C# .NET add-in. // #endregion // Header #region Namespaces using System; using System.Diagnostics; using System.Linq; using Autodesk.Revit.Attributes; using Autodesk.Revit.DB; using Autodesk.Revit.DB.Plumbing; using Autodesk.Revit.DB.Structure; using Autodesk.Revit.UI; #endregion // Namespaces namespace BuildingCoder { [Transaction(TransactionMode.Manual)] internal class CmdRollingOffset : IExternalCommand { private const string _prompt = "Please run this in a model containing " + "exactly two parallel offset pipe elements, " + "and they will be automatically selected. " + "Alternatively, pre-select two pipe elements " + "before launching this command, or post-select " + "them when prompted."; private const BuiltInParameter bipDiameter = BuiltInParameter.RBS_PIPE_DIAMETER_PARAM; /// <summary> /// This command can place either a model line /// to represent the rolling offset calculation /// result, or insert a real pipe segment and the /// associated fittings. /// </summary> private static readonly bool _place_model_line = false; /// <summary> /// Place the two 45 degree fittings and connect /// them instead of explicitly placing the /// rolling offset pipe segment. /// </summary> private static readonly bool _place_fittings = false; /// <summary> /// Switch between the new static Pipe.Create /// method and the obsolete /// Document.Create.NewPipe. /// </summary> private static readonly bool _use_static_pipe_create = true; public Result Execute( ExternalCommandData commandData, ref string message, ElementSet elements) { var uiapp = commandData.Application; var uidoc = uiapp.ActiveUIDocument; var app = uiapp.Application; var doc = uidoc.Document; //// Select all pipes in the entire model. //List<Pipe> pipes = new List<Pipe>( // new FilteredElementCollector( doc ) // .OfClass( typeof( Pipe ) ) // .ToElements() // .Cast<Pipe>() ); //int n = pipes.Count; //// If there are less than two, //// there is nothing we can do. //if( 2 > n ) //{ // message = _prompt; // return Result.Failed; //} //// If there are exactly two, pick those. //if( 2 < n ) //{ // // Else, check for a pre-selection. // pipes.Clear(); // Selection sel = uidoc.Selection; // //n = sel.Elements.Size; // 2014 // ICollection<ElementId> ids // = sel.GetElementIds(); // 2015 // n = ids.Count; // 2015 // Debug.Print( "{0} pre-selected elements.", // n ); // // If two or more model pipes were pre- // // selected, use the first two encountered. // if( 1 < n ) // { // //foreach( Element e in sel.Elements ) // 2014 // foreach( ElementId id in ids ) // 2015 // { // Pipe c = doc.GetElement( id ) as Pipe; // if( null != c ) // { // pipes.Add( c ); // if( 2 == pipes.Count ) // { // Debug.Print( "Found two model pipes, " // + "ignoring everything else." ); // break; // } // } // } // } // // Else, prompt for an // // interactive post-selection. // if( 2 != pipes.Count ) // { // pipes.Clear(); // try // { // Reference r = sel.PickObject( // ObjectType.Element, // new PipeElementSelectionFilter(), // "Please pick first pipe." 
); // pipes.Add( doc.GetElement( r.ElementId ) // as Pipe ); // } // catch( Autodesk.Revit.Exceptions // .OperationCanceledException ) // { // return Result.Cancelled; // } // try // { // Reference r = sel.PickObject( // ObjectType.Element, // new PipeElementSelectionFilter(), // "Please pick second pipe." ); // pipes.Add( doc.GetElement( r.ElementId ) // as Pipe ); // } // catch( Autodesk.Revit.Exceptions // .OperationCanceledException ) // { // return Result.Cancelled; // } // } //} var picker = new JtPairPicker<Pipe>(uidoc); var rc = picker.Pick(); if (Result.Failed == rc) message = _prompt; if (Result.Succeeded != rc) return rc; var pipes = picker.Selected; // Check for same pipe system type. var systemTypeId = pipes[0].MEPSystem.GetTypeId(); Debug.Assert(pipes[1].MEPSystem.GetTypeId() .IntegerValue.Equals( systemTypeId.IntegerValue), "expected two similar pipes"); // Check for same pipe level. var levelId = pipes[0].LevelId; Debug.Assert( pipes[1].LevelId.IntegerValue.Equals( levelId.IntegerValue), "expected two pipes on same level"); // Extract data from the two selected pipes. var wall_thickness = GetWallThickness(pipes[0]); Debug.Print("{0} has wall thickness {1}", Util.ElementDescription(pipes[0]), Util.RealString(wall_thickness)); var c0 = pipes[0].GetCurve(); var c1 = pipes[1].GetCurve(); if (!(c0 is Line) || !(c1 is Line)) { message = $"{_prompt} Expected straight pipes."; return Result.Failed; } var p00 = c0.GetEndPoint(0); var p01 = c0.GetEndPoint(1); var p10 = c1.GetEndPoint(0); var p11 = c1.GetEndPoint(1); var v0 = p01 - p00; var v1 = p11 - p10; if (!Util.IsParallel(v0, v1)) { message = $"{_prompt} Expected parallel pipes."; return Result.Failed; } // Select the two pipe endpoints // that are farthest apart. var p0 = p00.DistanceTo(p10) > p01.DistanceTo(p10) ? p00 : p01; var p1 = p10.DistanceTo(p0) > p11.DistanceTo(p0) ? p10 : p11; var pm = 0.5 * (p0 + p1); var v = p1 - p0; if (Util.IsParallel(v, v0)) { message = "The selected pipes are colinear."; return Result.Failed; } // Normal vector of the plane defined by the // two parallel and offset pipes, which is // the plane hosting the rolling offset var z = v.CrossProduct(v1); // Vector perpendicular to v0 and v0 and // z, i.e. vector pointing from the first pipe // to the second in the cross sectional view. var w = z.CrossProduct(v1).Normalize(); // Offset distance perpendicular to pipe direction var distanceAcross = Math.Abs( v.DotProduct(w)); // Distance between endpoints parallel // to pipe direction var distanceAlong = Math.Abs( v.DotProduct(v1.Normalize())); Debug.Assert(Util.IsEqual(v.GetLength(), Math.Sqrt(distanceAcross * distanceAcross + distanceAlong * distanceAlong)), "expected Pythagorean equality here"); // The required offset pipe angle. var angle = 45 * Math.PI / 180.0; // The angle on the other side. var angle2 = 0.5 * Math.PI - angle; var length = distanceAcross * Math.Tan(angle2); var halfLength = 0.5 * length; // How long should the pipe stubs become? 
var remainingPipeLength = 0.5 * (distanceAlong - length); if (0 > v1.DotProduct(v)) v1.Negate(); v1 = v1.Normalize(); var q0 = p0 + remainingPipeLength * v1; var q1 = p1 - remainingPipeLength * v1; using var tx = new Transaction(doc); // Determine pipe diameter for creating // matching pipes and fittings var pipe = pipes[0]; var diameter = pipe .get_Parameter(bipDiameter) // "Diameter" .AsDouble(); // Pipe type for calls to doc.Create.NewPipe var pipe_type_standard = new FilteredElementCollector(doc) .OfClass(typeof(PipeType)) .Cast<PipeType>() .Where(e => e.Name.Equals("Standard")) .FirstOrDefault(); Debug.Assert( pipe_type_standard.Id.IntegerValue.Equals( pipe.PipeType.Id.IntegerValue), "expected all pipes in this simple " + "model to use the same pipe type"); tx.Start("Rolling Offset"); if (_place_model_line) { // Trim or extend existing pipes (pipes[0].Location as LocationCurve).Curve = Line.CreateBound(p0, q0); (pipes[1].Location as LocationCurve).Curve = Line.CreateBound(p1, q1); // Add a model line for the rolling offset pipe var creator = new Creator(doc); var line = Line.CreateBound(q0, q1); creator.CreateModelCurve(line); pipe = null; } else if (_place_fittings) { // Set active work plane to the rolling // offset plane... removed again, since // this has no effect at all on the // fitting placement or rotation. // //Plane plane = new Plane( z, q0 ); // //SketchPlane sp = SketchPlane.Create( // doc, plane ); // //uidoc.ActiveView.SketchPlane = sp; //uidoc.ActiveView.ShowActiveWorkPlane(); var symbol = new FilteredElementCollector(doc) .OfClass(typeof(FamilySymbol)) .OfCategory(BuiltInCategory.OST_PipeFitting) .Cast<FamilySymbol>() .Where(e => e.Family.Name.Contains("Elbow - Generic")) .FirstOrDefault(); // Set up first 45 degree elbow fitting var fitting0 = doc.Create .NewFamilyInstance(q0, symbol, StructuralType.NonStructural); fitting0.LookupParameter("Angle").Set( 45.0 * Math.PI / 180.0); //fitting0.get_Parameter( bipDiameter ) // does not exist // .Set( diameter ); fitting0.LookupParameter("Nominal Radius") .Set(0.5 * diameter); var axis = Line.CreateBound(p0, q0); angle = z.AngleTo(XYZ.BasisZ); ElementTransformUtils.RotateElement( doc, fitting0.Id, axis, Math.PI - angle); var con0 = Util.GetConnectorClosestTo( fitting0, p0); // Trim or extend existing pipe (pipes[0].Location as LocationCurve).Curve = Line.CreateBound(p0, con0.Origin); // Connect pipe to fitting Util.Connect(con0.Origin, pipe, fitting0); // Set up second 45 degree elbow fitting var fitting1 = doc.Create .NewFamilyInstance(q1, symbol, StructuralType.NonStructural); //fitting1.get_Parameter( "Angle" ).Set( 45.0 * Math.PI / 180.0 ); // 2014 //fitting1.get_Parameter( "Nominal Radius" ).Set( 0.5 * diameter ); // 2014 fitting1.LookupParameter("Angle").Set(45.0 * Math.PI / 180.0); // 2015 fitting1.LookupParameter("Nominal Radius").Set(0.5 * diameter); // 2015 axis = Line.CreateBound( q1, q1 + XYZ.BasisZ); ElementTransformUtils.RotateElement( doc, fitting1.Id, axis, Math.PI); axis = Line.CreateBound(q1, p1); ElementTransformUtils.RotateElement( doc, fitting1.Id, axis, Math.PI - angle); var con1 = Util.GetConnectorClosestTo( fitting1, p1); (pipes[1].Location as LocationCurve).Curve = Line.CreateBound(con1.Origin, p1); Util.Connect(con1.Origin, fitting1, pipes[1]); con0 = Util.GetConnectorClosestTo( fitting0, pm); con1 = Util.GetConnectorClosestTo( fitting1, pm); // Connecting one fitting to the other does // not insert a pipe in between. If the // system is edited later, however, the two // fittings snap together. 
// //con0.ConnectTo( con1 ); // Create rolling offset pipe segment //pipe = doc.Create.NewPipe( con0.Origin, // 2014 // con1.Origin, pipe_type_standard ); pipe = Pipe.Create(doc, pipe_type_standard.Id, levelId, con0, con1); // 2015 pipe.get_Parameter(bipDiameter) .Set(diameter); // Connect rolling offset pipe segment // with elbow fittings at each end Util.Connect(con0.Origin, fitting0, pipe); Util.Connect(con1.Origin, pipe, fitting1); } else { if (_use_static_pipe_create) { // Element id arguments to Pipe.Create. ElementId idSystem; ElementId idType; ElementId idLevel; // All these values are invalid for idSystem: var idSystem1 = pipe.MEPSystem.Id; var idSystem2 = ElementId.InvalidElementId; var idSystem3 = PipingSystem.Create( doc, pipe.MEPSystem.GetTypeId(), "Tbc") .Id; // This throws an argument exception saying // The systemTypeId is not valid piping system type. // Parameter name: systemTypeId //pipe = Pipe.Create( doc, idSystem, // idType, idLevel, q0, q1 ); // Retrieve pipe system type, e.g. // hydronic supply. var pipingSystemType = new FilteredElementCollector(doc) .OfClass(typeof(PipingSystemType)) .OfType<PipingSystemType>() .FirstOrDefault(st => st.SystemClassification == MEPSystemClassification .SupplyHydronic); if (null == pipingSystemType) { message = "Could not find hydronic supply piping system type"; return Result.Failed; } idSystem = pipingSystemType.Id; Debug.Assert(pipe.get_Parameter( BuiltInParameter.RBS_PIPING_SYSTEM_TYPE_PARAM) .AsElementId().IntegerValue.Equals( idSystem.IntegerValue), "expected same piping system element id"); // Retrieve the PipeType. var pipeType = new FilteredElementCollector(doc) .OfClass(typeof(PipeType)) .OfType<PipeType>() .FirstOrDefault(); if (null == pipeType) { message = "Could not find pipe type"; return Result.Failed; } idType = pipeType.Id; Debug.Assert(pipe.get_Parameter( BuiltInParameter.ELEM_TYPE_PARAM) .AsElementId().IntegerValue.Equals( idType.IntegerValue), "expected same pipe type element id"); Debug.Assert(pipe.PipeType.Id.IntegerValue .Equals(idType.IntegerValue), "expected same pipe type element id"); // Retrieve the reference level. // pipe.LevelId is not the correct source! idLevel = pipe.get_Parameter( BuiltInParameter.RBS_START_LEVEL_PARAM) .AsElementId(); // Create the rolling offset pipe. 
pipe = Pipe.Create(doc, idSystem, idType, idLevel, q0, q1); } else { //pipe = doc.Create.NewPipe( q0, q1, pipe_type_standard ); // 2014 pipe = Pipe.Create(doc, systemTypeId, pipe_type_standard.Id, levelId, q0, q1); // 2015 } pipe.get_Parameter(bipDiameter) .Set(diameter); // Connect rolling offset pipe segment // directly with the neighbouring original // pipes // //Util.Connect( q0, pipes[0], pipe ); //Util.Connect( q1, pipe, pipes[1] ); // NewElbowFitting performs the following: // - select appropriate fitting family and type // - place and orient a family instance // - set its parameters appropriately // - connect it with its neighbours var con0 = Util.GetConnectorClosestTo( pipes[0], q0); var con = Util.GetConnectorClosestTo( pipe, q0); doc.Create.NewElbowFitting(con0, con); var con1 = Util.GetConnectorClosestTo( pipes[1], q1); con = Util.GetConnectorClosestTo( pipe, q1); doc.Create.NewElbowFitting(con, con1); } tx.Commit(); return Result.Succeeded; } #region Victor's Code private Result f( UIDocument uidoc, Document doc) { var message = string.Empty; // Extract all pipe system types var mepSystemTypes = new FilteredElementCollector(doc) .OfClass(typeof(PipingSystemType)) .OfType<PipingSystemType>() .ToList(); // Get the Domestic hot water type var domesticHotWaterSystemType = mepSystemTypes.FirstOrDefault( st => st.SystemClassification == MEPSystemClassification.DomesticHotWater); if (domesticHotWaterSystemType == null) { message = "Could not find Domestic Hot Water System Type"; return Result.Failed; } // Looking for the PipeType var pipeTypes = new FilteredElementCollector(doc) .OfClass(typeof(PipeType)) .OfType<PipeType>() .ToList(); // Get the first type from the collection var firstPipeType = pipeTypes.FirstOrDefault(); if (firstPipeType == null) { message = "Could not find Pipe Type"; return Result.Failed; } var level = uidoc.ActiveView.GenLevel; if (level == null) { message = "Wrong Active View"; return Result.Failed; } var startPoint = XYZ.Zero; var endPoint = new XYZ(100, 0, 0); using (var t = new Transaction(doc)) { t.Start("Create pipe using Pipe.Create"); var pipe = Pipe.Create(doc, domesticHotWaterSystemType.Id, firstPipeType.Id, level.Id, startPoint, endPoint); t.Commit(); } Debug.Print(message); return Result.Succeeded; } #endregion // Victor's Code #region Determine Pipe Wall Thickness private const BuiltInParameter bipDiameterInner = BuiltInParameter.RBS_PIPE_INNER_DIAM_PARAM; private const BuiltInParameter bipDiameterOuter = BuiltInParameter.RBS_PIPE_OUTER_DIAMETER; private static double GetWallThickness(Pipe pipe) { var dinner = pipe.get_Parameter( bipDiameterInner).AsDouble(); var douter = pipe.get_Parameter( bipDiameterOuter).AsDouble(); return 0.5 * (douter - dinner); } #endregion // Determine Pipe Wall Thickness } } // Z:\a\rvt\rolling_offset.rvt // /a/j/adn/case/bsd/1264642/attach/PipeTest.cs
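// ---------------------------------------------------------------------------
// Hedged sketch (assumption: plain System.Numerics stand-in, no Revit API):
// it reproduces only the rolling offset arithmetic used above -- the hosting
// plane normal z, the cross-sectional direction w, the across/along distances
// and the 45 degree segment length -- so the geometry can be checked outside
// Revit. All coordinates are hypothetical sample values.
// ---------------------------------------------------------------------------
using System;
using System.Numerics;

internal static class RollingOffsetMathSketch
{
    private static void Main()
    {
        // Farthest-apart endpoints of the two parallel pipes.
        Vector3 p0 = new Vector3(0, 0, 0);
        Vector3 p1 = new Vector3(20, 3, 4);
        Vector3 v1 = new Vector3(1, 0, 0);      // common pipe direction

        Vector3 v = p1 - p0;
        Vector3 z = Vector3.Cross(v, v1);                     // hosting plane normal
        Vector3 w = Vector3.Normalize(Vector3.Cross(z, v1));  // across the pipes

        float distanceAcross = MathF.Abs(Vector3.Dot(v, w));
        float distanceAlong = MathF.Abs(Vector3.Dot(v, Vector3.Normalize(v1)));

        float angle = 45f * MathF.PI / 180f;
        float length = distanceAcross * MathF.Tan(0.5f * MathF.PI - angle);
        float remaining = 0.5f * (distanceAlong - length);

        Vector3 q0 = p0 + remaining * Vector3.Normalize(v1);
        Vector3 q1 = p1 - remaining * Vector3.Normalize(v1);

        Console.WriteLine($"offset segment from {q0} to {q1}, length {length}");
    }
}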
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Runtime.CompilerServices; namespace System.Numerics { // This file contains the definitions for all of the JIT intrinsic methods and properties that are recognized by the current x64 JIT compiler. // The implementation defined here is used in any circumstance where the JIT fails to recognize these members as intrinsic. // The JIT recognizes these methods and properties by name and signature: if either is changed, the JIT will no longer recognize the member. // Some methods declared here are not strictly intrinsic, but delegate to an intrinsic method. For example, only one overload of CopyTo() public partial struct Vector4 { /// <summary> /// The X component of the vector. /// </summary> public Single X; /// <summary> /// The Y component of the vector. /// </summary> public Single Y; /// <summary> /// The Z component of the vector. /// </summary> public Single Z; /// <summary> /// The W component of the vector. /// </summary> public Single W; #region Constructors /// <summary> /// Constructs a vector whose elements are all the single specified value. /// </summary> /// <param name="value">The element to fill the vector with.</param> [JitIntrinsic] public Vector4(Single value) : this(value, value, value, value) { } /// <summary> /// Constructs a vector with the given individual elements. /// </summary> /// <param name="w">W component.</param> /// <param name="x">X component.</param> /// <param name="y">Y component.</param> /// <param name="z">Z component.</param> [JitIntrinsic] public Vector4(Single x, Single y, Single z, Single w) { W = w; X = x; Y = y; Z = z; } /// <summary> /// Constructs a Vector4 from the given Vector2 and a Z and W component. /// </summary> /// <param name="value">The vector to use as the X and Y components.</param> /// <param name="z">The Z component.</param> /// <param name="w">The W component.</param> public Vector4(Vector2 value, Single z, Single w) { X = value.X; Y = value.Y; Z = z; W = w; } /// <summary> /// Constructs a Vector4 from the given Vector3 and a W component. /// </summary> /// <param name="value">The vector to use as the X, Y, and Z components.</param> /// <param name="w">The W component.</param> public Vector4(Vector3 value, Single w) { X = value.X; Y = value.Y; Z = value.Z; W = w; } #endregion Constructors #region Public Instance Methods /// <summary> /// Copies the contents of the vector into the given array. /// </summary> [MethodImpl(MethodImplOptions.AggressiveInlining)] public void CopyTo(Single[] array) { CopyTo(array, 0); } /// <summary> /// Copies the contents of the vector into the given array, starting from index. 
/// </summary> /// <exception cref="ArgumentNullException">If array is null.</exception> /// <exception cref="RankException">If array is multidimensional.</exception> /// <exception cref="ArgumentOutOfRangeException">If index is greater than end of the array or index is less than zero.</exception> /// <exception cref="ArgumentException">If number of elements in source vector is greater than those available in destination array.</exception> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public void CopyTo(Single[] array, int index) { if (array == null) throw new ArgumentNullException("values"); if (index < 0 || index >= array.Length) throw new ArgumentOutOfRangeException(SR.GetString("Arg_ArgumentOutOfRangeException", index)); if ((array.Length - index) < 4) throw new ArgumentException(SR.GetString("Arg_ElementsInSourceIsGreaterThanDestination", index)); array[index] = X; array[index + 1] = Y; array[index + 2] = Z; array[index + 3] = W; } /// <summary> /// Returns a boolean indicating whether the given Vector4 is equal to this Vector4 instance. /// </summary> /// <param name="other">The Vector4 to compare this instance to.</param> /// <returns>True if the other Vector4 is equal to this instance; False otherwise.</returns> [JitIntrinsic] public bool Equals(Vector4 other) { return this.X == other.X && this.Y == other.Y && this.Z == other.Z && this.W == other.W; } #endregion Public Instance Methods #region Public Static Methods /// <summary> /// Returns the dot product of two vectors. /// </summary> /// <param name="vector1">The first vector.</param> /// <param name="vector2">The second vector.</param> /// <returns>The dot product.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static float Dot(Vector4 vector1, Vector4 vector2) { return vector1.X * vector2.X + vector1.Y * vector2.Y + vector1.Z * vector2.Z + vector1.W * vector2.W; } /// <summary> /// Returns a vector whose elements are the minimum of each of the pairs of elements in the two source vectors. /// </summary> /// <param name="value1">The first source vector.</param> /// <param name="value2">The second source vector.</param> /// <returns>The minimized vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 Min(Vector4 value1, Vector4 value2) { return new Vector4( (value1.X < value2.X) ? value1.X : value2.X, (value1.Y < value2.Y) ? value1.Y : value2.Y, (value1.Z < value2.Z) ? value1.Z : value2.Z, (value1.W < value2.W) ? value1.W : value2.W); } /// <summary> /// Returns a vector whose elements are the maximum of each of the pairs of elements in the two source vectors. /// </summary> /// <param name="value1">The first source vector.</param> /// <param name="value2">The second source vector.</param> /// <returns>The maximized vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 Max(Vector4 value1, Vector4 value2) { return new Vector4( (value1.X > value2.X) ? value1.X : value2.X, (value1.Y > value2.Y) ? value1.Y : value2.Y, (value1.Z > value2.Z) ? value1.Z : value2.Z, (value1.W > value2.W) ? value1.W : value2.W); } /// <summary> /// Returns a vector whose elements are the absolute values of each of the source vector's elements. 
/// </summary> /// <param name="value">The source vector.</param> /// <returns>The absolute value vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 Abs(Vector4 value) { return new Vector4(Math.Abs(value.X), Math.Abs(value.Y), Math.Abs(value.Z), Math.Abs(value.W)); } /// <summary> /// Returns a vector whose elements are the square root of each of the source vector's elements. /// </summary> /// <param name="value">The source vector.</param> /// <returns>The square root vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 SquareRoot(Vector4 value) { return new Vector4((Single)Math.Sqrt(value.X), (Single)Math.Sqrt(value.Y), (Single)Math.Sqrt(value.Z), (Single)Math.Sqrt(value.W)); } #endregion Public Static Methods #region Public static operators /// <summary> /// Adds two vectors together. /// </summary> /// <param name="left">The first source vector.</param> /// <param name="right">The second source vector.</param> /// <returns>The summed vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 operator +(Vector4 left, Vector4 right) { return new Vector4(left.X + right.X, left.Y + right.Y, left.Z + right.Z, left.W + right.W); } /// <summary> /// Subtracts the second vector from the first. /// </summary> /// <param name="left">The first source vector.</param> /// <param name="right">The second source vector.</param> /// <returns>The difference vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 operator -(Vector4 left, Vector4 right) { return new Vector4(left.X - right.X, left.Y - right.Y, left.Z - right.Z, left.W - right.W); } /// <summary> /// Multiplies two vectors together. /// </summary> /// <param name="left">The first source vector.</param> /// <param name="right">The second source vector.</param> /// <returns>The product vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 operator *(Vector4 left, Vector4 right) { return new Vector4(left.X * right.X, left.Y * right.Y, left.Z * right.Z, left.W * right.W); } /// <summary> /// Multiplies a vector by the given scalar. /// </summary> /// <param name="left">The source vector.</param> /// <param name="right">The scalar value.</param> /// <returns>The scaled vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 operator *(Vector4 left, Single right) { return left * new Vector4(right); } /// <summary> /// Multiplies a vector by the given scalar. /// </summary> /// <param name="left">The scalar value.</param> /// <param name="right">The source vector.</param> /// <returns>The scaled vector.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 operator *(Single left, Vector4 right) { return new Vector4(left) * right; } /// <summary> /// Divides the first vector by the second. /// </summary> /// <param name="left">The first source vector.</param> /// <param name="right">The second source vector.</param> /// <returns>The vector resulting from the division.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 operator /(Vector4 left, Vector4 right) { return new Vector4(left.X / right.X, left.Y / right.Y, left.Z / right.Z, left.W / right.W); } /// <summary> /// Divides the vector by the given scalar. 
/// </summary> /// <param name="value1">The source vector.</param> /// <param name="value2">The scalar value.</param> /// <returns>The result of the division.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 operator /(Vector4 value1, float value2) { float invDiv = 1.0f / value2; return new Vector4( value1.X * invDiv, value1.Y * invDiv, value1.Z * invDiv, value1.W * invDiv); } /// <summary> /// Negates a given vector. /// </summary> /// <param name="value">The source vector.</param> /// <returns>The negated vector.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Vector4 operator -(Vector4 value) { return Zero - value; } /// <summary> /// Returns a boolean indicating whether the two given vectors are equal. /// </summary> /// <param name="left">The first vector to compare.</param> /// <param name="right">The second vector to compare.</param> /// <returns>True if the vectors are equal; False otherwise.</returns> [JitIntrinsic] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator ==(Vector4 left, Vector4 right) { return left.Equals(right); } /// <summary> /// Returns a boolean indicating whether the two given vectors are not equal. /// </summary> /// <param name="left">The first vector to compare.</param> /// <param name="right">The second vector to compare.</param> /// <returns>True if the vectors are not equal; False if they are equal.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool operator !=(Vector4 left, Vector4 right) { return !(left == right); } #endregion Public static operators } }
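// ---------------------------------------------------------------------------
// Hedged usage illustration (not part of the Vector4 source above): a short
// console program exercising the members defined there -- Dot, Min, Max, Abs,
// SquareRoot and the element-wise operators. The values are arbitrary samples.
// ---------------------------------------------------------------------------
using System;
using System.Numerics;

internal static class Vector4UsageSketch
{
    private static void Main()
    {
        var a = new Vector4(1f, -2f, 3f, -4f);
        var b = new Vector4(new Vector3(0.5f, 4f, 9f), 16f);  // Vector3 + W ctor

        Console.WriteLine(Vector4.Dot(a, b));     // 1*0.5 + -2*4 + 3*9 + -4*16 = -44.5
        Console.WriteLine(Vector4.Min(a, b));     // element-wise minimum
        Console.WriteLine(Vector4.Max(a, b));     // element-wise maximum
        Console.WriteLine(Vector4.Abs(a));        // <1, 2, 3, 4>
        Console.WriteLine(Vector4.SquareRoot(b)); // per-element square root

        Console.WriteLine(a + b);                 // element-wise add
        Console.WriteLine(a * 2f);                // scalar scale
        Console.WriteLine(-a == new Vector4(-1f, 2f, -3f, 4f)); // True
    }
}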
// -- FILE ------------------------------------------------------------------ // name : TimePeriodDemoData.cs // project : Itenso Time Period // created : Jani Giannoudis - 2011.02.18 // language : C# 4.0 // environment: .NET 2.0 // copyright : (c) 2011-2012 by Itenso GmbH, Switzerland // -------------------------------------------------------------------------- using System; using System.Globalization; using System.Threading; using Itenso.TimePeriod; namespace Itenso.TimePeriodDemo { // ------------------------------------------------------------------------ internal class TimePeriodDemoData { // ---------------------------------------------------------------------- public TimePeriodDemoData() { Reset(); } // TimePeriodDemoData // ---------------------------------------------------------------------- public TimeCalendarConfig CalendarConfig { get { return new TimeCalendarConfig { Culture = culture, YearBaseMonth = YearBaseMonth, YearWeekType = YearWeekType }; } } // CalendarConfig // ---------------------------------------------------------------------- public DateTime SetupDate { get; private set; } // ---------------------------------------------------------------------- public int PeriodCount { get; private set; } // ---------------------------------------------------------------------- public int Year { get; private set; } // ---------------------------------------------------------------------- public YearHalfyear Halfyear { get; private set; } // ---------------------------------------------------------------------- public YearQuarter Quarter { get; private set; } // ---------------------------------------------------------------------- public YearMonth Month { get; private set; } // ---------------------------------------------------------------------- public int Week { get; private set; } // ---------------------------------------------------------------------- public int Day { get; private set; } // ---------------------------------------------------------------------- public int Hour { get; private set; } // ---------------------------------------------------------------------- public int Minute { get; private set; } // ---------------------------------------------------------------------- private string CultureName { get; set; } // ---------------------------------------------------------------------- private YearMonth YearBaseMonth { get; set; } // ---------------------------------------------------------------------- private YearWeekType YearWeekType { get; set; } // ---------------------------------------------------------------------- public bool QueryCulture() { string cultureName = ConsoleTool.QueryText( "Culture [enter=" + CultureInfo.CurrentCulture.Name + "]: ", CultureInfo.CurrentCulture.Name ); if ( cultureName == null ) { return false; } if ( UpdateCulture( cultureName ) == false ) { return false; } return true; } // QueryCulture // ---------------------------------------------------------------------- public bool QueryPeriodCount() { int? periodCount = ConsoleTool.QueryNumber( "Period count [enter=" + PeriodCount + "]: ", PeriodCount, 1, 10000 ); if ( !periodCount.HasValue ) { return false; } PeriodCount = periodCount.Value; return true; } // QueryPeriodCount // ---------------------------------------------------------------------- public bool QueryYear() { int? 
year = ConsoleTool.QueryNumber( "Year [enter=" + Year + "]: ", Year, DateTime.MinValue.Year, DateTime.MaxValue.Year ); if ( !year.HasValue ) { return false; } Year = year.Value; return true; } // QueryYear // ---------------------------------------------------------------------- public bool QueryYearBaseMonth() { int? yearStartMonth = ConsoleTool.QueryNumber( "Year start month (1..12) [enter=" + (int)YearBaseMonth + "/" + YearBaseMonth + "]: ", (int)YearBaseMonth, 1, TimeSpec.MonthsPerYear ); if ( !yearStartMonth.HasValue ) { return false; } YearBaseMonth = (YearMonth)yearStartMonth.Value; return true; } // QueryYearBaseMonth // ---------------------------------------------------------------------- public bool QueryYearHalfyear() { int? halfyear = ConsoleTool.QueryNumber( "Halfyear (1..2) [enter=" + (int)Halfyear + "/" + Halfyear + "]: ", (int)Halfyear, 1, TimeSpec.HalfyearsPerYear ); if ( !halfyear.HasValue ) { return false; } Halfyear = (YearHalfyear)halfyear.Value; return true; } // QueryYearHalfyear // ---------------------------------------------------------------------- public bool QueryYearQuarter() { int? yearQuarter = ConsoleTool.QueryNumber( "Quarter (1..4) [enter=" + (int)Quarter + "/" + Quarter + "]: ", (int)Quarter, 1, TimeSpec.QuartersPerYear ); if ( !yearQuarter.HasValue ) { return false; } Quarter = (YearQuarter)yearQuarter.Value; return true; } // QueryYearQuarter // ---------------------------------------------------------------------- public bool QueryYearMonth() { int? yearMonth = ConsoleTool.QueryNumber( "Month (1..12) [enter=" + (int)Month + "/" + Month + "]: ", (int)Month, 1, TimeSpec.QuartersPerYear ); if ( !yearMonth.HasValue ) { return false; } Month = (YearMonth)yearMonth.Value; return true; } // QueryYearMonth // ---------------------------------------------------------------------- public bool QueryWeek() { int? week = ConsoleTool.QueryNumber( "Week (1..53) [enter=" + Week + "]: ", Week, 1, 53 ); if ( !week.HasValue ) { return false; } Week = week.Value; return true; } // QueryWeek // ---------------------------------------------------------------------- public bool QueryYearWeekType() { int? weekType = ConsoleTool.QueryNumber( "Week type (0..1) [enter=" + (int)YearWeekType + "/" + YearWeekType + "]: ", (int)YearWeekType, 0, 2 ); if ( !weekType.HasValue ) { return false; } YearWeekType = (YearWeekType)weekType.Value; return true; } // QueryYearWeekType // ---------------------------------------------------------------------- public bool QueryDay() { int? day = ConsoleTool.QueryNumber( "Day (1..31) [enter=" + Day + "]: ", Day, 1, 31 ); if ( !day.HasValue ) { return false; } Day = day.Value; return true; } // QueryDay // ---------------------------------------------------------------------- public bool QueryHour() { int? hour = ConsoleTool.QueryNumber( "Hour (0..23) [enter=" + Hour + "]: ", Hour, 0, TimeSpec.HoursPerDay - 1 ); if ( !hour.HasValue ) { return false; } Hour = hour.Value; return true; } // QueryHour // ---------------------------------------------------------------------- public bool QueryMinute() { int? 
minute = ConsoleTool.QueryNumber( "Minute (0..59) [enter=" + Minute + "]: ", Minute, 0, TimeSpec.MinutesPerHour - 1 ); if ( !minute.HasValue ) { return false; } Minute = minute.Value; return true; } // QueryMinute // ---------------------------------------------------------------------- public void Reset() { Reset( SetupDate ); } // Reset // ---------------------------------------------------------------------- public void Reset( DateTime dateTime ) { culture = CultureInfo.CurrentCulture; CultureName = culture.Name; YearBaseMonth = TimeSpec.CalendarYearStartMonth; int year; int weekOfYear; TimeTool.GetWeekOfYear( SetupDate, culture, YearWeekType, out year, out weekOfYear ); PeriodCount = 1; SetupDate = ClockProxy.Clock.Now; Year = SetupDate.Year; Halfyear = TimeTool.GetHalfyearOfMonth( YearBaseMonth ); Quarter = TimeTool.GetQuarterOfMonth( YearBaseMonth ); Month = (YearMonth)SetupDate.Month; Week = weekOfYear; Day = SetupDate.Day; Hour = SetupDate.Hour; Minute = SetupDate.Minute; } // Reset // ---------------------------------------------------------------------- private bool UpdateCulture( string cultureName ) { try { culture = new CultureInfo( cultureName ); } catch ( Exception e ) { ConsoleTool.WriteLine( e.Message ); return false; } CultureName = cultureName; return true; } // UpdateCulture // ---------------------------------------------------------------------- // members private CultureInfo culture; } // class TimePeriodDemoData } // namespace Itenso.TimePeriodDemo // -- EOF -------------------------------------------------------------------
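// ---------------------------------------------------------------------------
// Hedged sketch (it uses only members visible in the demo class above:
// TimeCalendarConfig, TimeSpec.CalendarYearStartMonth, YearWeekType and
// TimeTool.GetWeekOfYear): it builds the same calendar configuration the
// CalendarConfig property returns and computes the week-of-year the way
// Reset() does. The culture name, week type value and date are assumed
// sample values.
// ---------------------------------------------------------------------------
using System;
using System.Globalization;
using Itenso.TimePeriod;

internal static class CalendarConfigSketch
{
    private static void Main()
    {
        CultureInfo culture = new CultureInfo("de-CH");
        YearWeekType weekType = YearWeekType.Iso8601;   // assumed enum member

        var config = new TimeCalendarConfig
        {
            Culture = culture,
            YearBaseMonth = TimeSpec.CalendarYearStartMonth, // January
            YearWeekType = weekType
        };

        int year;
        int weekOfYear;
        TimeTool.GetWeekOfYear(new DateTime(2011, 2, 18), culture, weekType,
            out year, out weekOfYear);

        Console.WriteLine("{0} week {1}", year, weekOfYear); // e.g. 2011 week 7
    }
}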
using System; using System.Web; using System.Web.Security; using System.Web.Services; using System.ComponentModel; using System.Web.Script.Services; using System.Xml; using System.Xml.Xsl; using System.IO; using System.Text.RegularExpressions; using System.Net; using System.Web.UI; using Umbraco.Core; using Umbraco.Core.IO; using Umbraco.Web.UI; using Umbraco.Web; using Umbraco.Web.Cache; using Umbraco.Web.WebServices; using umbraco.BusinessLogic; using umbraco.businesslogic.Exceptions; using umbraco.cms.businesslogic.web; using umbraco.cms.businesslogic.media; using umbraco.BasePages; namespace umbraco.presentation.webservices { /// <summary> /// Summary description for legacyAjaxCalls /// </summary> [WebService(Namespace = "http://umbraco.org/webservices")] [WebServiceBinding(ConformsTo = WsiProfiles.BasicProfile1_1)] [ToolboxItem(false)] [ScriptService] public class legacyAjaxCalls : UmbracoAuthorizedWebService { private User _currentUser; [WebMethod] public bool ValidateUser(string username, string password) { if (ValidateCredentials(username, password)) { var u = new BusinessLogic.User(username); BasePage.doLogin(u); return true; } return false; } /// <summary> /// method to accept a string value for the node id. Used for tree's such as python /// and xslt since the file names are the node IDs /// </summary> /// <param name="nodeId"></param> /// <param name="alias"></param> /// <param name="nodeType"></param> [WebMethod] [ScriptMethod] public void Delete(string nodeId, string alias, string nodeType) { if (!AuthorizeRequest()) return; //U4-2686 - alias is html encoded, make sure to decode alias = HttpUtility.HtmlDecode(alias); //check which parameters to pass depending on the types passed in int intNodeId; if (nodeType == "memberGroups") { LegacyDialogHandler.Delete( new HttpContextWrapper(HttpContext.Current), UmbracoUser, nodeType, 0, nodeId); } else if (int.TryParse(nodeId, out intNodeId) && nodeType != "member") // Fix for #26965 - numeric member login gets parsed as nodeId { LegacyDialogHandler.Delete( new HttpContextWrapper(HttpContext.Current), UmbracoUser, nodeType, intNodeId, alias); } else { LegacyDialogHandler.Delete( new HttpContextWrapper(HttpContext.Current), UmbracoUser, nodeType, 0, nodeId); } } /// <summary> /// Permanently deletes a document/media object. /// Used to remove an item from the recycle bin. 
/// </summary> /// <param name="nodeId"></param> /// <param name="nodeType"></param> [WebMethod] [ScriptMethod] public void DeleteContentPermanently(string nodeId, string nodeType) { int intNodeId; if (int.TryParse(nodeId, out intNodeId)) { switch (nodeType) { case "media": case "mediaRecycleBin": //ensure user has access to media AuthorizeRequest(DefaultApps.media.ToString(), true); new Media(intNodeId).delete(true); break; case "content": case "contentRecycleBin": default: //ensure user has access to content AuthorizeRequest(DefaultApps.content.ToString(), true); new Document(intNodeId).delete(true); break; } } else { throw new ArgumentException("The nodeId argument could not be parsed to an integer"); } } [WebMethod] [ScriptMethod] public void DisableUser(int userId) { AuthorizeRequest(DefaultApps.users.ToString(), true); BusinessLogic.User.GetUser(userId).disable(); } [WebMethod] [ScriptMethod] public string GetNodeName(int nodeId) { AuthorizeRequest(true); return new cms.businesslogic.CMSNode(nodeId).Text; } [WebMethod] [ScriptMethod] public string[] GetNodeBreadcrumbs(int nodeId) { AuthorizeRequest(true); var node = new cms.businesslogic.CMSNode(nodeId); var crumbs = new System.Collections.Generic.List<string>() { node.Text }; while (node != null && node.Level > 1) { node = node.Parent; crumbs.Add(node.Text); } crumbs.Reverse(); return crumbs.ToArray(); } [WebMethod] [ScriptMethod] public string NiceUrl(int nodeId) { AuthorizeRequest(true); return library.NiceUrl(nodeId); } [WebMethod] [ScriptMethod] public string ProgressStatus(string Key) { AuthorizeRequest(true); return Application[helper.Request("key")].ToString(); } [Obsolete("This is no longer used and will be removed in future versions")] [WebMethod] [ScriptMethod] public void RenewUmbracoSession() { AuthorizeRequest(true); BasePage.RenewLoginTimeout(); } [Obsolete("This is no longer used and will be removed in future versions")] [WebMethod] [ScriptMethod] public int GetSecondsBeforeUserLogout() { //TODO: Change this to not throw an exception otherwise we end up with JS errors all the time when recompiling!! 
AuthorizeRequest(true); var timeout = BasePage.GetTimeout(true); var timeoutDate = new DateTime(timeout); var currentDate = DateTime.Now; return (int) timeoutDate.Subtract(currentDate).TotalSeconds; } [WebMethod] [ScriptMethod] public string TemplateMasterPageContentContainer(int templateId, int masterTemplateId) { AuthorizeRequest(DefaultApps.settings.ToString(), true); return new cms.businesslogic.template.Template(templateId).GetMasterContentElement(masterTemplateId); } [WebMethod] [ScriptMethod] public string SaveFile(string fileName, string fileAlias, string fileContents, string fileType, int fileID, int masterID, bool ignoreDebug) { switch (fileType) { case "xslt": AuthorizeRequest(DefaultApps.developer.ToString(), true); return SaveXslt(fileName, fileContents, ignoreDebug); case "python": AuthorizeRequest(DefaultApps.developer.ToString(), true); return "true"; case "css": AuthorizeRequest(DefaultApps.settings.ToString(), true); return SaveCss(fileName, fileContents, fileID); case "script": AuthorizeRequest(DefaultApps.settings.ToString(), true); return SaveScript(fileName, fileContents); case "template": AuthorizeRequest(DefaultApps.settings.ToString(), true); return SaveTemplate(fileName, fileAlias, fileContents, fileID, masterID); default: throw new ArgumentException(String.Format("Invalid fileType passed: '{0}'", fileType)); } } public string Tidy(string textToTidy) { AuthorizeRequest(true); return library.Tidy(helper.Request("StringToTidy"), true); } private static string SaveCss(string fileName, string fileContents, int fileId) { string returnValue; var stylesheet = new StyleSheet(fileId) { Content = fileContents, Text = fileName }; try { stylesheet.saveCssToFile(); returnValue = "true"; } catch (Exception ee) { throw new Exception("Couldn't save file", ee); } return returnValue; } private string SaveXslt(string fileName, string fileContents, bool ignoreDebugging) { IOHelper.EnsurePathExists(SystemDirectories.Xslt); var tempFileName = IOHelper.MapPath(SystemDirectories.Xslt + "/" + System.DateTime.Now.Ticks + "_temp.xslt"); using (var sw = File.CreateText(tempFileName)) { sw.Write(fileContents); sw.Close(); } // Test the xslt var errorMessage = ""; if (!ignoreDebugging) { try { // Check if there's any documents yet if (content.Instance.XmlContent.SelectNodes("/root/node").Count > 0) { var macroXml = new XmlDocument(); macroXml.LoadXml("<macro/>"); var macroXslt = new XslCompiledTransform(); var umbPage = new page(content.Instance.XmlContent.SelectSingleNode("//node [@parentID = -1]")); var xslArgs = macro.AddMacroXsltExtensions(); var lib = new library(umbPage); xslArgs.AddExtensionObject("urn:umbraco.library", lib); HttpContext.Current.Trace.Write("umbracoMacro", "After adding extensions"); // Add the current node xslArgs.AddParam("currentPage", "", library.GetXmlNodeById(umbPage.PageID.ToString())); HttpContext.Current.Trace.Write("umbracoMacro", "Before performing transformation"); // Create reader and load XSL file // We need to allow custom DTD's, useful for defining an ENTITY var readerSettings = new XmlReaderSettings(); readerSettings.ProhibitDtd = false; using (var xmlReader = XmlReader.Create(tempFileName, readerSettings)) { var xslResolver = new XmlUrlResolver { Credentials = CredentialCache.DefaultCredentials }; macroXslt.Load(xmlReader, XsltSettings.TrustedXslt, xslResolver); xmlReader.Close(); // Try to execute the transformation var macroResult = new HtmlTextWriter(new StringWriter()); macroXslt.Transform(macroXml, xslArgs, macroResult); macroResult.Close(); } } 
else { errorMessage = "stub"; } } catch (Exception errorXslt) { errorMessage = (errorXslt.InnerException ?? errorXslt).ToString(); // Full error message errorMessage = errorMessage.Replace("\n", "<br/>\n"); // Find error var m = Regex.Matches(errorMessage, @"\d*[^,],\d[^\)]", RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace); foreach (Match mm in m) { var errorLine = mm.Value.Split(','); if (errorLine.Length > 0) { var theErrorLine = int.Parse(errorLine[0]); var theErrorChar = int.Parse(errorLine[1]); errorMessage = "Error in XSLT at line " + errorLine[0] + ", char " + errorLine[1] + "<br/>"; errorMessage += "<span style=\"font-family: courier; font-size: 11px;\">"; var xsltText = fileContents.Split("\n".ToCharArray()); for (var i = 0; i < xsltText.Length; i++) { if (i >= theErrorLine - 3 && i <= theErrorLine + 1) if (i + 1 == theErrorLine) { errorMessage += "<b>" + (i + 1) + ": &gt;&gt;&gt;&nbsp;&nbsp;" + Server.HtmlEncode(xsltText[i].Substring(0, theErrorChar)); errorMessage += "<span style=\"text-decoration: underline; border-bottom: 1px solid red\">" + Server.HtmlEncode(xsltText[i].Substring(theErrorChar, xsltText[i].Length - theErrorChar)).Trim() + "</span>"; errorMessage += " &lt;&lt;&lt;</b><br/>"; } else errorMessage += (i + 1) + ": &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;" + Server.HtmlEncode(xsltText[i]) + "<br/>"; } errorMessage += "</span>"; } } } } if (errorMessage == "" && fileName.ToLower().EndsWith(".xslt")) { //Hardcoded security-check... only allow saving files in xslt directory... var savePath = IOHelper.MapPath(SystemDirectories.Xslt + "/" + fileName); if (savePath.StartsWith(IOHelper.MapPath(SystemDirectories.Xslt))) { using (var sw = File.CreateText(savePath)) { sw.Write(fileContents); sw.Close(); } errorMessage = "true"; } else { errorMessage = "Illegal path"; } } File.Delete(tempFileName); return errorMessage; } private static string SaveScript(string filename, string contents) { var val = contents; string returnValue; try { var savePath = IOHelper.MapPath(SystemDirectories.Scripts + "/" + filename); //Directory check.. only allow files in script dir and below to be edited if (savePath.StartsWith(IOHelper.MapPath(SystemDirectories.Scripts + "/"))) { //ensure the folder exists before saving Directory.CreateDirectory(Path.GetDirectoryName(savePath)); using (var sw = File.CreateText(IOHelper.MapPath(SystemDirectories.Scripts + "/" + filename))) { sw.Write(val); sw.Close(); returnValue = "true"; } } else { throw new ArgumentException("Couldnt save to file - Illegal path"); } } catch (Exception ex) { throw new ArgumentException(String.Format("Couldnt save to file '{0}'", filename), ex); } return returnValue; } private static string SaveTemplate(string templateName, string templateAlias, string templateContents, int templateID, int masterTemplateID) { var tp = new cms.businesslogic.template.Template(templateID); var retVal = "false"; tp.Text = templateName; tp.Alias = templateAlias; tp.MasterTemplate = masterTemplateID; tp.Design = templateContents; tp.Save(); retVal = "true"; return retVal; } [Obsolete("You should use the AuthorizeRequest methods on the base class of UmbracoAuthorizedWebService and ensure you inherit from that class for umbraco asmx web services")] public static void Authorize() { // check for secure connection if (GlobalSettings.UseSSL && !HttpContext.Current.Request.IsSecureConnection) throw new UserAuthorizationException("This installation requires a secure connection (via SSL). 
Please update the URL to include https://"); if (!BasePage.ValidateUserContextID(BasePages.BasePage.umbracoUserContextID)) throw new Exception("Client authorization failed. User is not logged in"); } } }
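// ---------------------------------------------------------------------------
// Hedged sketch (assumption: standalone helper, not an Umbraco API): SaveXslt
// and SaveScript above guard against path traversal by mapping the target
// path and checking that it still starts with the allowed root directory.
// The helper below shows the same containment check with plain System.IO,
// normalising ".." segments via Path.GetFullPath first; the directory and
// file names are hypothetical.
// ---------------------------------------------------------------------------
using System;
using System.IO;

internal static class SavePathGuardSketch
{
    // Returns true only if fileName resolves to a location under allowedRoot.
    private static bool IsUnderRoot(string allowedRoot, string fileName)
    {
        string root = Path.GetFullPath(allowedRoot)
            .TrimEnd(Path.DirectorySeparatorChar) + Path.DirectorySeparatorChar;
        string candidate = Path.GetFullPath(Path.Combine(root, fileName));
        return candidate.StartsWith(root, StringComparison.OrdinalIgnoreCase);
    }

    private static void Main()
    {
        string scriptsRoot = @"C:\inetpub\site\scripts";               // hypothetical
        Console.WriteLine(IsUnderRoot(scriptsRoot, @"editor\ui.js"));  // True
        Console.WriteLine(IsUnderRoot(scriptsRoot, @"..\web.config")); // False
    }
}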
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /*============================================================================= ** ** ** ** Purpose: Class for creating and managing a threadpool ** ** =============================================================================*/ #pragma warning disable 0420 /* * Below you'll notice two sets of APIs that are separated by the * use of 'Unsafe' in their names. The unsafe versions are called * that because they do not propagate the calling stack onto the * worker thread. This allows code to lose the calling stack and * thereby elevate its security privileges. Note that this operation * is much akin to the combined ability to control security policy * and control security evidence. With these privileges, a person * can gain the right to load assemblies that are fully trusted which * then assert full trust and can call any code they want regardless * of the previous stack information. */ using Internal.Runtime.Augments; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Runtime.InteropServices; namespace System.Threading { internal static class ThreadPoolGlobals { public static readonly int processorCount = Environment.ProcessorCount; private static ThreadPoolWorkQueue _workQueue; public static ThreadPoolWorkQueue workQueue { get { return LazyInitializer.EnsureInitialized(ref _workQueue, () => new ThreadPoolWorkQueue()); } } } internal sealed class ThreadPoolWorkQueue { internal static class WorkStealingQueueList { private static volatile WorkStealingQueue[] _queues = new WorkStealingQueue[0]; public static WorkStealingQueue[] Queues => _queues; public static void Add(WorkStealingQueue queue) { Debug.Assert(queue != null); while (true) { WorkStealingQueue[] oldQueues = _queues; Debug.Assert(Array.IndexOf(oldQueues, queue) == -1); var newQueues = new WorkStealingQueue[oldQueues.Length + 1]; Array.Copy(oldQueues, 0, newQueues, 0, oldQueues.Length); newQueues[newQueues.Length - 1] = queue; if (Interlocked.CompareExchange(ref _queues, newQueues, oldQueues) == oldQueues) { break; } } } public static void Remove(WorkStealingQueue queue) { Debug.Assert(queue != null); while (true) { WorkStealingQueue[] oldQueues = _queues; if (oldQueues.Length == 0) { return; } int pos = Array.IndexOf(oldQueues, queue); if (pos == -1) { Debug.Fail("Should have found the queue"); return; } var newQueues = new WorkStealingQueue[oldQueues.Length - 1]; if (pos == 0) { Array.Copy(oldQueues, 1, newQueues, 0, newQueues.Length); } else if (pos == oldQueues.Length - 1) { Array.Copy(oldQueues, 0, newQueues, 0, newQueues.Length); } else { Array.Copy(oldQueues, 0, newQueues, 0, pos); Array.Copy(oldQueues, pos + 1, newQueues, pos, newQueues.Length - pos); } if (Interlocked.CompareExchange(ref _queues, newQueues, oldQueues) == oldQueues) { break; } } } } internal sealed class WorkStealingQueue { private const int INITIAL_SIZE = 32; internal volatile IThreadPoolWorkItem[] m_array = new IThreadPoolWorkItem[INITIAL_SIZE]; private volatile int m_mask = INITIAL_SIZE - 1; #if DEBUG // in debug builds, start at the end so we exercise the index reset logic. 
private const int START_INDEX = int.MaxValue; #else private const int START_INDEX = 0; #endif private volatile int m_headIndex = START_INDEX; private volatile int m_tailIndex = START_INDEX; private SpinLock m_foreignLock = new SpinLock(enableThreadOwnerTracking: false); public void LocalPush(IThreadPoolWorkItem obj) { int tail = m_tailIndex; // We're going to increment the tail; if we'll overflow, then we need to reset our counts if (tail == int.MaxValue) { bool lockTaken = false; try { m_foreignLock.Enter(ref lockTaken); if (m_tailIndex == int.MaxValue) { // // Rather than resetting to zero, we'll just mask off the bits we don't care about. // This way we don't need to rearrange the items already in the queue; they'll be found // correctly exactly where they are. One subtlety here is that we need to make sure that // if head is currently < tail, it remains that way. This happens to just fall out from // the bit-masking, because we only do this if tail == int.MaxValue, meaning that all // bits are set, so all of the bits we're keeping will also be set. Thus it's impossible // for the head to end up > than the tail, since you can't set any more bits than all of // them. // m_headIndex = m_headIndex & m_mask; m_tailIndex = tail = m_tailIndex & m_mask; Debug.Assert(m_headIndex <= m_tailIndex); } } finally { if (lockTaken) m_foreignLock.Exit(useMemoryBarrier: true); } } // When there are at least 2 elements' worth of space, we can take the fast path. if (tail < m_headIndex + m_mask) { Volatile.Write(ref m_array[tail & m_mask], obj); m_tailIndex = tail + 1; } else { // We need to contend with foreign pops, so we lock. bool lockTaken = false; try { m_foreignLock.Enter(ref lockTaken); int head = m_headIndex; int count = m_tailIndex - m_headIndex; // If there is still space (one left), just add the element. if (count >= m_mask) { // We're full; expand the queue by doubling its size. var newArray = new IThreadPoolWorkItem[m_array.Length << 1]; for (int i = 0; i < m_array.Length; i++) newArray[i] = m_array[(i + head) & m_mask]; // Reset the field values, incl. the mask. m_array = newArray; m_headIndex = 0; m_tailIndex = tail = count; m_mask = (m_mask << 1) | 1; } Volatile.Write(ref m_array[tail & m_mask], obj); m_tailIndex = tail + 1; } finally { if (lockTaken) m_foreignLock.Exit(useMemoryBarrier: false); } } } [SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "Reviewed for thread safety")] public bool LocalFindAndPop(IThreadPoolWorkItem obj) { // Fast path: check the tail. If equal, we can skip the lock. if (m_array[(m_tailIndex - 1) & m_mask] == obj) { IThreadPoolWorkItem unused = LocalPop(); Debug.Assert(unused == null || unused == obj); return unused != null; } // Else, do an O(N) search for the work item. The theory of work stealing and our // inlining logic is that most waits will happen on recently queued work. And // since recently queued work will be close to the tail end (which is where we // begin our search), we will likely find it quickly. In the worst case, we // will traverse the whole local queue; this is typically not going to be a // problem (although degenerate cases are clearly an issue) because local work // queues tend to be somewhat shallow in length, and because if we fail to find // the work item, we are about to block anyway (which is very expensive). for (int i = m_tailIndex - 2; i >= m_headIndex; i--) { if (m_array[i & m_mask] == obj) { // If we found the element, block out steals to avoid interference. 
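// Added note: m_foreignLock is the same lock TrySteal tries to acquire (via TryEnter), so holding it here keeps thieves out while the slot is inspected and nulled; a steal that already claimed the slot shows up as the null check below, in which case we simply report that the item was not found.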
bool lockTaken = false; try { m_foreignLock.Enter(ref lockTaken); // If we encountered a race condition, bail. if (m_array[i & m_mask] == null) return false; // Otherwise, null out the element. Volatile.Write(ref m_array[i & m_mask], null); // And then check to see if we can fix up the indexes (if we're at // the edge). If we can't, we just leave nulls in the array and they'll // get filtered out eventually (but may lead to superfluous resizing). if (i == m_tailIndex) m_tailIndex -= 1; else if (i == m_headIndex) m_headIndex += 1; return true; } finally { if (lockTaken) m_foreignLock.Exit(useMemoryBarrier: false); } } } return false; } public IThreadPoolWorkItem LocalPop() => m_headIndex < m_tailIndex ? LocalPopCore() : null; [SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "Reviewed for thread safety")] private IThreadPoolWorkItem LocalPopCore() { while (true) { int tail = m_tailIndex; if (m_headIndex >= tail) { return null; } // Decrement the tail using a fence to ensure subsequent read doesn't come before. tail -= 1; Interlocked.Exchange(ref m_tailIndex, tail); // If there is no interaction with a take, we can head down the fast path. if (m_headIndex <= tail) { int idx = tail & m_mask; IThreadPoolWorkItem obj = Volatile.Read(ref m_array[idx]); // Check for nulls in the array. if (obj == null) continue; m_array[idx] = null; return obj; } else { // Interaction with takes: 0 or 1 elements left. bool lockTaken = false; try { m_foreignLock.Enter(ref lockTaken); if (m_headIndex <= tail) { // Element still available. Take it. int idx = tail & m_mask; IThreadPoolWorkItem obj = Volatile.Read(ref m_array[idx]); // Check for nulls in the array. if (obj == null) continue; m_array[idx] = null; return obj; } else { // If we encountered a race condition and element was stolen, restore the tail. m_tailIndex = tail + 1; return null; } } finally { if (lockTaken) m_foreignLock.Exit(useMemoryBarrier: false); } } } } public bool CanSteal => m_headIndex < m_tailIndex; public IThreadPoolWorkItem TrySteal(ref bool missedSteal) { while (true) { if (CanSteal) { bool taken = false; try { m_foreignLock.TryEnter(ref taken); if (taken) { // Increment head, and ensure read of tail doesn't move before it (fence). int head = m_headIndex; Interlocked.Exchange(ref m_headIndex, head + 1); if (head < m_tailIndex) { int idx = head & m_mask; IThreadPoolWorkItem obj = Volatile.Read(ref m_array[idx]); // Check for nulls in the array. if (obj == null) continue; m_array[idx] = null; return obj; } else { // Failed, restore head. m_headIndex = head; } } } finally { if (taken) m_foreignLock.Exit(useMemoryBarrier: false); } missedSteal = true; } return null; } } } internal readonly LowLevelConcurrentQueue<IThreadPoolWorkItem> workItems = new LowLevelConcurrentQueue<IThreadPoolWorkItem>(); private volatile int numOutstandingThreadRequests = 0; // The number of threads executing work items in the Dispatch method internal volatile int numWorkingThreads; public ThreadPoolWorkQueue() { } public ThreadPoolWorkQueueThreadLocals EnsureCurrentThreadHasQueue() => ThreadPoolWorkQueueThreadLocals.threadLocals ?? (ThreadPoolWorkQueueThreadLocals.threadLocals = new ThreadPoolWorkQueueThreadLocals(this)); internal void EnsureThreadRequested() { // // If we have not yet requested #procs threads, then request a new thread.
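// Added note: the CompareExchange loop below caps numOutstandingThreadRequests at ThreadPoolGlobals.processorCount, so at most one wake-up request per processor is ever in flight; MarkThreadRequestSatisfied decrements the count again when a worker enters Dispatch.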
// int count = numOutstandingThreadRequests; while (count < ThreadPoolGlobals.processorCount) { int prev = Interlocked.CompareExchange(ref numOutstandingThreadRequests, count + 1, count); if (prev == count) { ThreadPool.RequestWorkerThread(); break; } count = prev; } } internal void MarkThreadRequestSatisfied() { // // One of our outstanding thread requests has been satisfied. // Decrement the count so that future calls to EnsureThreadRequested will succeed. // int count = numOutstandingThreadRequests; while (count > 0) { int prev = Interlocked.CompareExchange(ref numOutstandingThreadRequests, count - 1, count); if (prev == count) { break; } count = prev; } } public void Enqueue(IThreadPoolWorkItem callback, bool forceGlobal) { ThreadPoolWorkQueueThreadLocals tl = null; if (!forceGlobal) tl = ThreadPoolWorkQueueThreadLocals.threadLocals; if (null != tl) { tl.workStealingQueue.LocalPush(callback); } else { workItems.Enqueue(callback); } EnsureThreadRequested(); } internal bool LocalFindAndPop(IThreadPoolWorkItem callback) { ThreadPoolWorkQueueThreadLocals tl = ThreadPoolWorkQueueThreadLocals.threadLocals; return tl != null && tl.workStealingQueue.LocalFindAndPop(callback); } public IThreadPoolWorkItem Dequeue(ThreadPoolWorkQueueThreadLocals tl, ref bool missedSteal) { WorkStealingQueue localWsq = tl.workStealingQueue; IThreadPoolWorkItem callback; if ((callback = localWsq.LocalPop()) == null && // first try the local queue !workItems.TryDequeue(out callback)) // then try the global queue { // finally try to steal from another thread's local queue WorkStealingQueue[] queues = WorkStealingQueueList.Queues; int c = queues.Length; Debug.Assert(c > 0, "There must at least be a queue for this thread."); int maxIndex = c - 1; int i = tl.random.Next(c); while (c > 0) { i = (i < maxIndex) ? i + 1 : 0; WorkStealingQueue otherQueue = queues[i]; if (otherQueue != localWsq && otherQueue.CanSteal) { callback = otherQueue.TrySteal(ref missedSteal); if (callback != null) { break; } } c--; } } return callback; } /// <summary> /// Dispatches work items to this thread. /// </summary> /// <returns> /// <c>true</c> if this thread did as much work as was available or its quantum expired. /// <c>false</c> if this thread stopped working early. /// </returns> internal static bool Dispatch() { var workQueue = ThreadPoolGlobals.workQueue; // // Save the start time // int startTickCount = Environment.TickCount; // // Update our records to indicate that an outstanding request for a thread has now been fulfilled. // From this point on, we are responsible for requesting another thread if we stop working for any // reason, and we believe there might still be work in the queue. // workQueue.MarkThreadRequestSatisfied(); Interlocked.Increment(ref workQueue.numWorkingThreads); // // Assume that we're going to need another thread if this one returns to the VM. We'll set this to // false later, but only if we're absolutely certain that the queue is empty. // bool needAnotherThread = true; IThreadPoolWorkItem workItem = null; try { // // Set up our thread-local data // ThreadPoolWorkQueueThreadLocals tl = workQueue.EnsureCurrentThreadHasQueue(); // // Loop until our quantum expires or there is no work. // while (ThreadPool.KeepDispatching(startTickCount)) { bool missedSteal = false; workItem = workQueue.Dequeue(tl, ref missedSteal); if (workItem == null) { // // No work. // If we missed a steal, though, there may be more work in the queue. // Instead of looping around and trying again, we'll just request another thread. 
Hopefully the thread // that owns the contended work-stealing queue will pick up its own workitems in the meantime, // which will be more efficient than this thread doing it anyway. // needAnotherThread = missedSteal; // Tell the VM we're returning normally, not because Hill Climbing asked us to return. return true; } // // If we found work, there may be more work. Ask for another thread so that the other work can be processed // in parallel. Note that this will only ask for a max of #procs threads, so it's safe to call it for every dequeue. // workQueue.EnsureThreadRequested(); try { SynchronizationContext.SetSynchronizationContext(null); workItem.ExecuteWorkItem(); } finally { workItem = null; SynchronizationContext.SetSynchronizationContext(null); } RuntimeThread.CurrentThread.ResetThreadPoolThread(); if (!ThreadPool.NotifyWorkItemComplete()) return false; } // If we get here, it's because our quantum expired. return true; } catch (Exception e) { // Work items should not allow exceptions to escape. For example, Task catches and stores any exceptions. Environment.FailFast("Unhandled exception in ThreadPool dispatch loop", e); return true; // Will never actually be executed because Environment.FailFast doesn't return } finally { int numWorkers = Interlocked.Decrement(ref workQueue.numWorkingThreads); Debug.Assert(numWorkers >= 0); // // If we are exiting for any reason other than that the queue is definitely empty, ask for another // thread to pick up where we left off. // if (needAnotherThread) workQueue.EnsureThreadRequested(); } } } // Simple random number generator. We don't need great randomness, we just need a little and for it to be fast. internal struct FastRandom // xorshift prng { private uint _w, _x, _y, _z; public FastRandom(int seed) { _x = (uint)seed; _w = 88675123; _y = 362436069; _z = 521288629; } public int Next(int maxValue) { Debug.Assert(maxValue > 0); uint t = _x ^ (_x << 11); _x = _y; _y = _z; _z = _w; _w = _w ^ (_w >> 19) ^ (t ^ (t >> 8)); return (int)(_w % (uint)maxValue); } } // Holds a WorkStealingQueue, and removes it from the list when this object is no longer referenced. internal sealed class ThreadPoolWorkQueueThreadLocals { [ThreadStatic] public static ThreadPoolWorkQueueThreadLocals threadLocals; public readonly ThreadPoolWorkQueue workQueue; public readonly ThreadPoolWorkQueue.WorkStealingQueue workStealingQueue; public FastRandom random = new FastRandom(Environment.CurrentManagedThreadId); // mutable struct, do not copy or make readonly public ThreadPoolWorkQueueThreadLocals(ThreadPoolWorkQueue tpq) { workQueue = tpq; workStealingQueue = new ThreadPoolWorkQueue.WorkStealingQueue(); ThreadPoolWorkQueue.WorkStealingQueueList.Add(workStealingQueue); } private void CleanUp() { if (null != workStealingQueue) { if (null != workQueue) { IThreadPoolWorkItem cb; while ((cb = workStealingQueue.LocalPop()) != null) { Debug.Assert(null != cb); workQueue.Enqueue(cb, forceGlobal: true); } } ThreadPoolWorkQueue.WorkStealingQueueList.Remove(workStealingQueue); } } ~ThreadPoolWorkQueueThreadLocals() { // Since the purpose of calling CleanUp is to transfer any pending workitems into the global // queue so that they will be executed by another thread, there's no point in doing this cleanup // if we're in the process of shutting down or unloading the AD. In those cases, the work won't // execute anyway. And there are subtle races involved there that would lead us to do the wrong // thing anyway. So we'll only clean up if this is a "normal" finalization.
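// Added note: CleanUp drains this thread's work-stealing queue back into the global queue (Enqueue with forceGlobal: true) before removing the queue from WorkStealingQueueList, so work queued locally by a dying thread is not lost.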
if (!(Environment.HasShutdownStarted /*|| AppDomain.CurrentDomain.IsFinalizingForUnload()*/)) CleanUp(); } } public delegate void WaitCallback(Object state); public delegate void WaitOrTimerCallback(Object state, bool timedOut); // signalled or timed out // // Interface to something that can be queued to the TP. This is implemented by // QueueUserWorkItemCallback, Task, and potentially other internal types. // For example, SemaphoreSlim represents callbacks using its own type that // implements IThreadPoolWorkItem. // // If we decide to expose some of the workstealing // stuff, this is NOT the thing we want to expose to the public. // internal interface IThreadPoolWorkItem { void ExecuteWorkItem(); } internal sealed class QueueUserWorkItemCallback : IThreadPoolWorkItem { private WaitCallback callback; private readonly ExecutionContext context; private readonly Object state; #if DEBUG private volatile int executed; [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1821:RemoveEmptyFinalizers")] ~QueueUserWorkItemCallback() { Debug.Assert( executed != 0 || Environment.HasShutdownStarted /*|| AppDomain.CurrentDomain.IsFinalizingForUnload()*/, "A QueueUserWorkItemCallback was never called!"); } private void MarkExecuted() { GC.SuppressFinalize(this); Debug.Assert( 0 == Interlocked.Exchange(ref executed, 1), "A QueueUserWorkItemCallback was called twice!"); } #endif internal QueueUserWorkItemCallback(WaitCallback waitCallback, Object stateObj, ExecutionContext ec) { callback = waitCallback; state = stateObj; context = ec; } void IThreadPoolWorkItem.ExecuteWorkItem() { #if DEBUG MarkExecuted(); #endif try { if (context == null) { WaitCallback cb = callback; callback = null; cb(state); } else ExecutionContext.Run(context, ccb, this); } catch (Exception e) { RuntimeAugments.ReportUnhandledException(e); throw; //unreachable } } internal static readonly ContextCallback ccb = new ContextCallback(WaitCallback_Context); private static void WaitCallback_Context(Object state) { QueueUserWorkItemCallback obj = (QueueUserWorkItemCallback)state; WaitCallback wc = obj.callback; Debug.Assert(null != wc); wc(obj.state); } } internal sealed class QueueUserWorkItemCallbackDefaultContext : IThreadPoolWorkItem { private WaitCallback callback; private readonly Object state; #if DEBUG private volatile int executed; [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1821:RemoveEmptyFinalizers")] ~QueueUserWorkItemCallbackDefaultContext() { Debug.Assert( executed != 0 || Environment.HasShutdownStarted /*|| AppDomain.CurrentDomain.IsFinalizingForUnload()*/, "A QueueUserWorkItemCallbackDefaultContext was never called!"); } private void MarkExecuted() { GC.SuppressFinalize(this); Debug.Assert( 0 == Interlocked.Exchange(ref executed, 1), "A QueueUserWorkItemCallbackDefaultContext was called twice!"); } #endif internal QueueUserWorkItemCallbackDefaultContext(WaitCallback waitCallback, Object stateObj) { callback = waitCallback; state = stateObj; } void IThreadPoolWorkItem.ExecuteWorkItem() { #if DEBUG MarkExecuted(); #endif try { ExecutionContext.Run(ExecutionContext.Default, ccb, this); } catch (Exception e) { RuntimeAugments.ReportUnhandledException(e); throw; //unreachable } } internal static readonly ContextCallback ccb = new ContextCallback(WaitCallback_Context); private static void WaitCallback_Context(Object state) { QueueUserWorkItemCallbackDefaultContext obj = (QueueUserWorkItemCallbackDefaultContext)state; WaitCallback wc = obj.callback; Debug.Assert(null != 
wc); obj.callback = null; wc(obj.state); } } internal class _ThreadPoolWaitOrTimerCallback { private WaitOrTimerCallback _waitOrTimerCallback; private ExecutionContext _executionContext; private Object _state; private static readonly ContextCallback _ccbt = new ContextCallback(WaitOrTimerCallback_Context_t); private static readonly ContextCallback _ccbf = new ContextCallback(WaitOrTimerCallback_Context_f); internal _ThreadPoolWaitOrTimerCallback(WaitOrTimerCallback waitOrTimerCallback, Object state, bool flowExecutionContext) { _waitOrTimerCallback = waitOrTimerCallback; _state = state; if (flowExecutionContext) { // capture the execution context _executionContext = ExecutionContext.Capture(); } } private static void WaitOrTimerCallback_Context_t(Object state) => WaitOrTimerCallback_Context(state, timedOut: true); private static void WaitOrTimerCallback_Context_f(Object state) => WaitOrTimerCallback_Context(state, timedOut: false); private static void WaitOrTimerCallback_Context(Object state, bool timedOut) { _ThreadPoolWaitOrTimerCallback helper = (_ThreadPoolWaitOrTimerCallback)state; helper._waitOrTimerCallback(helper._state, timedOut); } // callback helper internal static void PerformWaitOrTimerCallback(_ThreadPoolWaitOrTimerCallback helper, bool timedOut) { Debug.Assert(helper != null, "Null state passed to PerformWaitOrTimerCallback!"); // call directly if it is an unsafe call OR EC flow is suppressed if (helper._executionContext == null) { WaitOrTimerCallback callback = helper._waitOrTimerCallback; callback(helper._state, timedOut); } else { ExecutionContext.Run(helper._executionContext, timedOut ? _ccbt : _ccbf, helper); } } } public static partial class ThreadPool { [CLSCompliant(false)] public static RegisteredWaitHandle RegisterWaitForSingleObject( WaitHandle waitObject, WaitOrTimerCallback callBack, Object state, uint millisecondsTimeOutInterval, bool executeOnlyOnce) { if (millisecondsTimeOutInterval > (uint)int.MaxValue && millisecondsTimeOutInterval != uint.MaxValue) throw new ArgumentOutOfRangeException(nameof(millisecondsTimeOutInterval), SR.ArgumentOutOfRange_NeedNonNegOrNegative1); return RegisterWaitForSingleObject(waitObject, callBack, state, millisecondsTimeOutInterval, executeOnlyOnce, true); } [CLSCompliant(false)] public static RegisteredWaitHandle UnsafeRegisterWaitForSingleObject( WaitHandle waitObject, WaitOrTimerCallback callBack, Object state, uint millisecondsTimeOutInterval, bool executeOnlyOnce) { if (millisecondsTimeOutInterval > (uint)int.MaxValue && millisecondsTimeOutInterval != uint.MaxValue) throw new ArgumentOutOfRangeException(nameof(millisecondsTimeOutInterval), SR.ArgumentOutOfRange_NeedNonNegOrNegative1); return RegisterWaitForSingleObject(waitObject, callBack, state, millisecondsTimeOutInterval, executeOnlyOnce, false); } public static RegisteredWaitHandle RegisterWaitForSingleObject( WaitHandle waitObject, WaitOrTimerCallback callBack, Object state, int millisecondsTimeOutInterval, bool executeOnlyOnce) { if (millisecondsTimeOutInterval < -1) throw new ArgumentOutOfRangeException(nameof(millisecondsTimeOutInterval), SR.ArgumentOutOfRange_NeedNonNegOrNegative1); return RegisterWaitForSingleObject(waitObject, callBack, state, (UInt32)millisecondsTimeOutInterval, executeOnlyOnce, true); } public static RegisteredWaitHandle UnsafeRegisterWaitForSingleObject( WaitHandle waitObject, WaitOrTimerCallback callBack, Object state, int millisecondsTimeOutInterval, bool executeOnlyOnce) { if (millisecondsTimeOutInterval < -1) throw new
ArgumentOutOfRangeException(nameof(millisecondsTimeOutInterval), SR.ArgumentOutOfRange_NeedNonNegOrNegative1); return RegisterWaitForSingleObject(waitObject, callBack, state, (UInt32)millisecondsTimeOutInterval, executeOnlyOnce, false); } public static RegisteredWaitHandle RegisterWaitForSingleObject( WaitHandle waitObject, WaitOrTimerCallback callBack, Object state, long millisecondsTimeOutInterval, bool executeOnlyOnce) { if (millisecondsTimeOutInterval < -1 || millisecondsTimeOutInterval > int.MaxValue) throw new ArgumentOutOfRangeException(nameof(millisecondsTimeOutInterval), SR.ArgumentOutOfRange_NeedNonNegOrNegative1); return RegisterWaitForSingleObject(waitObject, callBack, state, (UInt32)millisecondsTimeOutInterval, executeOnlyOnce, true); } public static RegisteredWaitHandle UnsafeRegisterWaitForSingleObject( WaitHandle waitObject, WaitOrTimerCallback callBack, Object state, long millisecondsTimeOutInterval, bool executeOnlyOnce) { if (millisecondsTimeOutInterval < -1 || millisecondsTimeOutInterval > int.MaxValue) throw new ArgumentOutOfRangeException(nameof(millisecondsTimeOutInterval), SR.ArgumentOutOfRange_NeedNonNegOrNegative1); return RegisterWaitForSingleObject(waitObject, callBack, state, (UInt32)millisecondsTimeOutInterval, executeOnlyOnce, false); } public static RegisteredWaitHandle RegisterWaitForSingleObject( WaitHandle waitObject, WaitOrTimerCallback callBack, Object state, TimeSpan timeout, bool executeOnlyOnce) { int tm = WaitHandle.ToTimeoutMilliseconds(timeout); return RegisterWaitForSingleObject(waitObject, callBack, state, (UInt32)tm, executeOnlyOnce, true); } public static RegisteredWaitHandle UnsafeRegisterWaitForSingleObject( WaitHandle waitObject, WaitOrTimerCallback callBack, Object state, TimeSpan timeout, bool executeOnlyOnce) { int tm = WaitHandle.ToTimeoutMilliseconds(timeout); return RegisterWaitForSingleObject(waitObject, callBack, state, (UInt32)tm, executeOnlyOnce, false); } public static bool QueueUserWorkItem(WaitCallback callBack) => QueueUserWorkItem(callBack, null, preferLocal: false); public static bool QueueUserWorkItem(WaitCallback callBack, object state) => QueueUserWorkItem(callBack, state, preferLocal: false); public static bool QueueUserWorkItem(WaitCallback callBack, object state, bool preferLocal) { if (callBack == null) { throw new ArgumentNullException(nameof(callBack)); } ExecutionContext context = ExecutionContext.Capture(); IThreadPoolWorkItem tpcallBack = context == ExecutionContext.Default ? new QueueUserWorkItemCallbackDefaultContext(callBack, state) : (IThreadPoolWorkItem)new QueueUserWorkItemCallback(callBack, state, context); ThreadPoolGlobals.workQueue.Enqueue(tpcallBack, forceGlobal: !preferLocal); return true; } public static bool UnsafeQueueUserWorkItem(WaitCallback callBack, Object state) { if (callBack == null) { throw new ArgumentNullException(nameof(callBack)); } IThreadPoolWorkItem tpcallBack = new QueueUserWorkItemCallback(callBack, state, null); ThreadPoolGlobals.workQueue.Enqueue(tpcallBack, forceGlobal: true); return true; } internal static void UnsafeQueueCustomWorkItem(IThreadPoolWorkItem workItem, bool forceGlobal) { Debug.Assert(null != workItem); ThreadPoolGlobals.workQueue.Enqueue(workItem, forceGlobal); } // This method tries to take the target callback out of the current thread's queue. internal static bool TryPopCustomWorkItem(IThreadPoolWorkItem workItem) { Debug.Assert(null != workItem); return ThreadPoolGlobals.workQueue.LocalFindAndPop(workItem); } // Get all workitems. 
Called by TaskScheduler in its debugger hooks. internal static IEnumerable<IThreadPoolWorkItem> GetQueuedWorkItems() { // Enumerate the global queue foreach (IThreadPoolWorkItem workItem in ThreadPoolGlobals.workQueue.workItems) { yield return workItem; } // Enumerate each local queue foreach (ThreadPoolWorkQueue.WorkStealingQueue wsq in ThreadPoolWorkQueue.WorkStealingQueueList.Queues) { if (wsq != null && wsq.m_array != null) { IThreadPoolWorkItem[] items = wsq.m_array; for (int i = 0; i < items.Length; i++) { IThreadPoolWorkItem item = items[i]; if (item != null) { yield return item; } } } } } internal static IEnumerable<IThreadPoolWorkItem> GetLocallyQueuedWorkItems() { ThreadPoolWorkQueue.WorkStealingQueue wsq = ThreadPoolWorkQueueThreadLocals.threadLocals.workStealingQueue; if (wsq != null && wsq.m_array != null) { IThreadPoolWorkItem[] items = wsq.m_array; for (int i = 0; i < items.Length; i++) { IThreadPoolWorkItem item = items[i]; if (item != null) yield return item; } } } internal static IEnumerable<IThreadPoolWorkItem> GetGloballyQueuedWorkItems() => ThreadPoolGlobals.workQueue.workItems; private static object[] ToObjectArray(IEnumerable<IThreadPoolWorkItem> workitems) { int i = 0; foreach (IThreadPoolWorkItem item in workitems) { i++; } object[] result = new object[i]; i = 0; foreach (IThreadPoolWorkItem item in workitems) { if (i < result.Length) //just in case someone calls us while the queues are in motion result[i] = item; i++; } return result; } // This is the method the debugger will actually call, if it ends up calling // into ThreadPool directly. Tests can use this to simulate a debugger, as well. internal static object[] GetQueuedWorkItemsForDebugger() => ToObjectArray(GetQueuedWorkItems()); internal static object[] GetGloballyQueuedWorkItemsForDebugger() => ToObjectArray(GetGloballyQueuedWorkItems()); internal static object[] GetLocallyQueuedWorkItemsForDebugger() => ToObjectArray(GetLocallyQueuedWorkItems()); unsafe private static void NativeOverlappedCallback(object obj) { NativeOverlapped* overlapped = (NativeOverlapped*)(IntPtr)obj; _IOCompletionCallback.PerformIOCompletionCallback(0, 0, overlapped); } [CLSCompliant(false)] unsafe public static bool UnsafeQueueNativeOverlapped(NativeOverlapped* overlapped) { // OS doesn't signal handle, so do it here (CoreCLR does this assignment in ThreadPoolNative::CorPostQueuedCompletionStatus) overlapped->InternalLow = (IntPtr)0; // Both types of callbacks are executed on the same thread pool return UnsafeQueueUserWorkItem(NativeOverlappedCallback, (IntPtr)overlapped); } [Obsolete("ThreadPool.BindHandle(IntPtr) has been deprecated. Please use ThreadPool.BindHandle(SafeHandle) instead.", false)] public static bool BindHandle(IntPtr osHandle) { throw new PlatformNotSupportedException(SR.Arg_PlatformNotSupported); // Replaced by ThreadPoolBoundHandle.BindHandle } public static bool BindHandle(SafeHandle osHandle) { throw new PlatformNotSupportedException(SR.Arg_PlatformNotSupported); // Replaced by ThreadPoolBoundHandle.BindHandle } internal static bool IsThreadPoolThread { get { return ThreadPoolWorkQueueThreadLocals.threadLocals != null; } } } }
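// ---------------------------------------------------------------------------------------------
// Added illustrative sketch (not part of the original file): a minimal caller-side example of
// the public surface defined above. It highlights the two distinctions this file implements:
// QueueUserWorkItem captures and flows ExecutionContext while UnsafeQueueUserWorkItem passes a
// null context, and preferLocal: true becomes forceGlobal: false in ThreadPoolWorkQueue.Enqueue,
// so the item goes to the calling pool thread's work-stealing queue when one exists rather than
// to the global FIFO queue. The sample class and its names are hypothetical.
//
// using System;
// using System.Threading;
//
// internal static class ThreadPoolQueueSample
// {
//     internal static void QueueSamples()
//     {
//         // Flows the captured ExecutionContext; always lands in the global queue.
//         ThreadPool.QueueUserWorkItem(state => Console.WriteLine("global: " + state), "a");
//
//         // Flows ExecutionContext; prefers the current pool thread's local queue when called
//         // from a thread-pool thread, otherwise falls back to the global queue.
//         ThreadPool.QueueUserWorkItem(state => Console.WriteLine("local: " + state), "b", preferLocal: true);
//
//         // No ExecutionContext capture; forced onto the global queue (forceGlobal: true).
//         ThreadPool.UnsafeQueueUserWorkItem(state => Console.WriteLine("unsafe: " + state), "c");
//     }
// }
// ---------------------------------------------------------------------------------------------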
// // PrintOperation.cs // // Author: // Stephane Delcroix <[email protected]> // // Copyright (C) 2008-2009 Novell, Inc. // Copyright (C) 2008-2009 Stephane Delcroix // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using Cairo; using System; using System.Runtime.InteropServices; using Mono.Unix; using FSpot.Core; using FSpot.Widgets; using FSpot.Imaging; using Hyena; namespace FSpot { public class PrintOperation : Gtk.PrintOperation { IPhoto [] selected_photos; int photos_per_page = 1; CustomPrintWidget.FitMode fit = CustomPrintWidget.FitMode.Scaled; bool repeat, white_borders, crop_marks; string print_label_format; string comment; public PrintOperation (IPhoto [] selectedPhotos) { selected_photos = selectedPhotos; CustomTabLabel = Catalog.GetString ("Image Settings"); NPages = selectedPhotos.Length; DefaultPageSetup = Global.PageSetup; } protected override Gtk.Widget OnCreateCustomWidget () { Gtk.Widget widget = new CustomPrintWidget (this); widget.ShowAll (); (widget as CustomPrintWidget).Changed += OnCustomWidgetChanged; OnCustomWidgetChanged (widget); return widget; } protected override void OnCustomWidgetApply (Gtk.Widget widget) { CustomPrintWidget cpw = widget as CustomPrintWidget; UseFullPage = cpw.UseFullPage; photos_per_page = cpw.PhotosPerPage; repeat = cpw.Repeat; NPages = repeat ? 
selected_photos.Length :(int) Math.Ceiling (1.0 * selected_photos.Length / photos_per_page); fit = cpw.Fitmode; white_borders = cpw.WhiteBorders; crop_marks = cpw.CropMarks; print_label_format = cpw.PrintLabelFormat; comment = cpw.CustomText; } protected void OnCustomWidgetChanged (Gtk.Widget widget) { OnCustomWidgetApply (widget); using (var surface = new ImageSurface (Format.ARGB32, 360, 254)) { using (var gr = new Context (surface)) { gr.SetSourceColor (new Color (1, 1, 1)); gr.Rectangle (0, 0, 360, 254); gr.Fill (); using (Gdk.Pixbuf pixbuf = Gdk.Pixbuf.LoadFromResource ("flower.png")) { DrawImage (gr, pixbuf,0, 0, 360, 254); } } (widget as CustomPrintWidget).PreviewImage.Pixbuf = CreatePixbuf (surface); } } protected override void OnDrawPage (Gtk.PrintContext context, int page_nr) { base.OnDrawPage (context, page_nr); Context cr = context.CairoContext; int ppx, ppy; switch (photos_per_page) { default: case 1: ppx = ppy =1; break; case 2: ppx = 1; ppy = 2; break; case 4: ppx = ppy = 2; break; case 9: ppx = ppy = 3; break; case 12: ppx = 3; ppy = 4; break; case 20: ppx = 4; ppy = 5; break; case 30: ppx = 5; ppy = 6; break; } //FIXME: if paper is landscape, swap ppx with ppy double w = context.Width / ppx; double h = context.Height / ppy; // compute picture size using 4800DPI double mx=(w / 25.4) * 4800, my=(h / 25.4) * 4800; for (int x = 0; x <= ppx; x++) { for (int y = 0; y <= ppy; y++) { int p_index = repeat ? page_nr : page_nr * photos_per_page + y * ppx + x; if (crop_marks) DrawCropMarks (cr, x*w, y*h, w*.1); if (x == ppx || y == ppy || p_index >= selected_photos.Length) continue; using (var img = ImageFile.Create (selected_photos[p_index].DefaultVersion.Uri)) { Gdk.Pixbuf pixbuf; try { pixbuf = img.Load ((int) mx, (int) my); Cms.Profile printer_profile; if (ColorManagement.Profiles.TryGetValue (Preferences.Get<string> (Preferences.COLOR_MANAGEMENT_OUTPUT_PROFILE), out printer_profile)) ColorManagement.ApplyProfile (pixbuf, img.GetProfile (), printer_profile); } catch (Exception e) { Log.Exception ("Unable to load image " + selected_photos[p_index].DefaultVersion.Uri + "\n", e); // If the image is not found load error pixbuf pixbuf = new Gdk.Pixbuf (PixbufUtils.ErrorPixbuf, 0, 0, PixbufUtils.ErrorPixbuf.Width, PixbufUtils.ErrorPixbuf.Height); } //Gdk.Pixbuf pixbuf = img.Load (100, 100); bool rotated = false; if (Math.Sign ((double)pixbuf.Width/pixbuf.Height - 1.0) != Math.Sign (w/h - 1.0)) { Gdk.Pixbuf d_pixbuf = pixbuf.RotateSimple (Gdk.PixbufRotation.Counterclockwise); pixbuf.Dispose (); pixbuf = d_pixbuf; rotated = true; } DrawImage (cr, pixbuf, x * w, y * h, w, h); string tag_string = ""; foreach (Tag t in selected_photos[p_index].Tags) tag_string = String.Concat (tag_string, t.Name); string label = String.Format (print_label_format, comment, selected_photos[p_index].Name, selected_photos[p_index].Time.ToLocalTime ().ToShortDateString (), selected_photos[p_index].Time.ToLocalTime ().ToShortTimeString (), tag_string, selected_photos[p_index].Description); DrawComment (context, (x + 1) * w, (rotated ? y : y + 1) * h, (rotated ? 
w : h) * .025, label, rotated); pixbuf.Dispose (); } } } } protected override void OnEndPrint (Gtk.PrintContext context) { base.OnEndPrint (context); context.Dispose (); } void DrawCropMarks (Context cr, double x, double y, double length) { cr.Save (); cr.SetSourceColor (new Color (0, 0, 0)); cr.MoveTo (x - length/2, y); cr.LineTo (x + length/2, y); cr.MoveTo (x, y - length/2); cr.LineTo (x, y + length/2); cr.LineWidth = .2; cr.SetDash (new [] {length*.4, length*.2}, 0); cr.Stroke (); cr.Restore (); } static void DrawComment (Gtk.PrintContext context, double x, double y, double h, string comment, bool rotated) { if (string.IsNullOrEmpty(comment)) return; Context cr = context.CairoContext; cr.Save (); Pango.Layout layout = context.CreatePangoLayout (); Pango.FontDescription desc = Pango.FontDescription.FromString ("sans 14"); layout.FontDescription = desc; layout.SetText (comment); int lay_w, lay_h; layout.GetPixelSize (out lay_w, out lay_h); double scale = h/lay_h; if (rotated) { cr.Translate (x - h, y + lay_w * scale); cr.Rotate (- Math.PI / 2); } else cr.Translate (x - lay_w * scale, y - h); cr.Scale (scale, scale); Pango.CairoHelper.ShowLayout (context.CairoContext, layout); cr.Restore (); } private void DrawImage (Context cr, Gdk.Pixbuf pixbuf, double x, double y, double w, double h) { double scalex, scaley; switch (fit) { case CustomPrintWidget.FitMode.Zoom: scalex = scaley = Math.Max (w/pixbuf.Width, h/pixbuf.Height); break; case CustomPrintWidget.FitMode.Fill: scalex = w/pixbuf.Width; scaley = h/pixbuf.Height; break; default: case CustomPrintWidget.FitMode.Scaled: scalex = scaley = Math.Min (w/pixbuf.Width, h/pixbuf.Height); break; } double rectw = w / scalex; double recth = h / scaley; cr.Save (); if (white_borders) cr.Translate (w * .025, h * .025); cr.Translate (x, y); if (white_borders) cr.Scale (.95, .95); cr.Scale (scalex, scaley); cr.Rectangle (0, 0, rectw, recth); Gdk.CairoHelper.SetSourcePixbuf (cr, pixbuf, (rectw - pixbuf.Width) / 2.0, (recth - pixbuf.Height) / 2.0); cr.Fill (); if (white_borders) { cr.Rectangle (0, 0 ,rectw, recth); cr.SetSourceColor (new Color (0, 0, 0)); cr.LineWidth = 1 / scalex; cr.Stroke (); } cr.Restore (); } //FIXME: f_pixbuf_from_cairo_surface is missing from libfspot [DllImport("libfspot")] static extern IntPtr f_pixbuf_from_cairo_surface (IntPtr handle); static Gdk.Pixbuf CreatePixbuf (Surface s) { IntPtr result = f_pixbuf_from_cairo_surface (s.Handle); return (Gdk.Pixbuf) GLib.Object.GetObject (result, true); } } }
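// ---------------------------------------------------------------------------------------------
// Added illustrative sketch (not part of the original file): a minimal example of driving this
// operation, assuming an IPhoto [] selection and a parent Gtk.Window are in scope (both names
// here are hypothetical). Gtk.PrintOperation.Run with PrintOperationAction.PrintDialog shows the
// print dialog and, on confirmation, calls back into the OnCreateCustomWidget,
// OnCustomWidgetApply and OnDrawPage overrides above.
//
// var op = new FSpot.PrintOperation (selectedPhotos);
// op.Run (Gtk.PrintOperationAction.PrintDialog, parentWindow);
// ---------------------------------------------------------------------------------------------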
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Win32.SafeHandles; // TODO: Once we upgrade to C# 6, remove all of these and simply import the Http class. using CURLAUTH = Interop.Http.CURLAUTH; using CURLcode = Interop.Http.CURLcode; using CURLINFO = Interop.Http.CURLINFO; using CURLMcode = Interop.Http.CURLMcode; using CURLMSG = Interop.Http.CURLMSG; using CURLoption = Interop.Http.CURLoption; using SafeCurlMultiHandle = Interop.Http.SafeCurlMultiHandle; using CurlSeekResult = Interop.Http.CurlSeekResult; namespace System.Net.Http { internal partial class CurlHandler : HttpMessageHandler { /// <summary>Provides a multi handle and the associated processing for all requests on the handle.</summary> private sealed class MultiAgent { private static readonly Interop.Http.ReadWriteCallback s_receiveHeadersCallback = CurlReceiveHeadersCallback; private static readonly Interop.Http.ReadWriteCallback s_sendCallback = CurlSendCallback; private static readonly Interop.Http.SeekCallback s_seekCallback = CurlSeekCallback; private static readonly Interop.Http.ReadWriteCallback s_receiveBodyCallback = CurlReceiveBodyCallback; private static readonly Interop.Http.DebugCallback s_debugCallback = CurlDebugFunction; /// <summary> /// A collection of not-yet-processed incoming requests for work to be done /// by this multi agent. This can include making new requests, canceling /// active requests, or unpausing active requests. /// Protected by a lock on <see cref="_incomingRequests"/>. /// </summary> private readonly Queue<IncomingRequest> _incomingRequests = new Queue<IncomingRequest>(); /// <summary>Map of activeOperations, indexed by a GCHandle ptr.</summary> private readonly Dictionary<IntPtr, ActiveRequest> _activeOperations = new Dictionary<IntPtr, ActiveRequest>(); /// <summary> /// Special file descriptor used to wake-up curl_multi_wait calls. This is the read /// end of a pipe, with the write end written to when work is queued or when cancellation /// is requested. This is only valid while the worker is executing. /// </summary> private SafeFileHandle _wakeupRequestedPipeFd; /// <summary> /// Write end of the pipe connected to <see cref="_wakeupRequestedPipeFd"/>. /// This is only valid while the worker is executing. /// </summary> private SafeFileHandle _requestWakeupPipeFd; /// <summary> /// Task for the currently running worker, or null if there is no current worker. /// Protected by a lock on <see cref="_incomingRequests"/>. /// </summary> private Task _runningWorker; /// <summary>Queues a request for the multi handle to process.</summary> public void Queue(IncomingRequest request) { lock (_incomingRequests) { // Add the request, then initiate processing. _incomingRequests.Enqueue(request); EnsureWorkerIsRunning(); } } /// <summary>Gets the ID of the currently running worker, or null if there isn't one.</summary> internal int? RunningWorkerId { get { return _runningWorker != null ? 
(int?)_runningWorker.Id : null; } } /// <summary>Schedules the processing worker if one hasn't already been scheduled.</summary> private void EnsureWorkerIsRunning() { Debug.Assert(Monitor.IsEntered(_incomingRequests), "Needs to be called under _incomingRequests lock"); if (_runningWorker == null) { EventSourceTrace("MultiAgent worker queueing"); // Create pipe used to forcefully wake up curl_multi_wait calls when something important changes. // This is created here rather than in Process so that the pipe is available immediately // for subsequent queue calls to use. Debug.Assert(_wakeupRequestedPipeFd == null, "Read pipe should have been cleared"); Debug.Assert(_requestWakeupPipeFd == null, "Write pipe should have been cleared"); unsafe { int* fds = stackalloc int[2]; Interop.CheckIo(Interop.Sys.Pipe(fds)); _wakeupRequestedPipeFd = new SafeFileHandle((IntPtr)fds[Interop.Sys.ReadEndOfPipe], true); _requestWakeupPipeFd = new SafeFileHandle((IntPtr)fds[Interop.Sys.WriteEndOfPipe], true); } // Kick off the processing task. It's "DenyChildAttach" to avoid any surprises if // code happens to create attached tasks, and it's LongRunning because this thread // is likely going to sit around for a while in a wait loop (and the more requests // are concurrently issued to the same agent, the longer the thread will be around). const TaskCreationOptions Options = TaskCreationOptions.DenyChildAttach | TaskCreationOptions.LongRunning; _runningWorker = new Task(s => { var thisRef = (MultiAgent)s; try { // Do the actual processing thisRef.EventSourceTrace("MultiAgent worker running"); thisRef.WorkerLoop(); } catch (Exception exc) { thisRef.EventSourceTrace("Unexpected worker failure: {0}", exc); Debug.Fail("Unexpected exception from processing loop: " + exc.ToString()); } finally { thisRef.EventSourceTrace("MultiAgent worker shutting down"); lock (thisRef._incomingRequests) { // Close our wakeup pipe (ignore close errors). // This is done while holding the lock to prevent // subsequent Queue calls from seeing an improperly configured // set of descriptors. thisRef._wakeupRequestedPipeFd.Dispose(); thisRef._wakeupRequestedPipeFd = null; thisRef._requestWakeupPipeFd.Dispose(); thisRef._requestWakeupPipeFd = null; // In the time between when we stopped processing and now, // more requests could have been added. If they were, // kick off another processing loop. thisRef._runningWorker = null; if (thisRef._incomingRequests.Count > 0) { thisRef.EnsureWorkerIsRunning(); } } } }, this, CancellationToken.None, Options); _runningWorker.Start(TaskScheduler.Default); // started after _runningWorker field set to avoid race conditions } else // _runningWorker != null { // The worker is already running. If there are already queued requests, we're done. // However, if there aren't any queued requests, Process could be blocked inside of // curl_multi_wait, and we want to make sure it wakes up to see that there are additional // requests waiting to be handled. So we write to the wakeup pipe. Debug.Assert(_incomingRequests.Count >= 1, "We just queued a request, so the count should be at least 1"); if (_incomingRequests.Count == 1) { RequestWakeup(); } } } /// <summary>Write a byte to the wakeup pipe.</summary> private void RequestWakeup() { unsafe { EventSourceTrace(null); byte b = 1; Interop.CheckIo(Interop.Sys.Write(_requestWakeupPipeFd, &b, 1)); } } /// <summary>Clears data from the wakeup pipe.</summary> /// <remarks> /// This must only be called when we know there's data to be read.
/// The MultiAgent could easily deadlock if it's called when there's no data in the pipe. /// </remarks> private unsafe void ReadFromWakeupPipeWhenKnownToContainData() { // It's possible but unlikely that there will be tons of extra data in the pipe, // more than we end up reading out here (it's unlikely because we only write a byte to the pipe when // transitioning from 0 to 1 incoming request). In that unlikely event, the worst // case will be that the next one or more waits will wake up immediately, with each one // subsequently clearing out more of the pipe. const int ClearBufferSize = 64; // sufficiently large to clear the pipe in any normal case byte* clearBuf = stackalloc byte[ClearBufferSize]; Interop.CheckIo(Interop.Sys.Read(_wakeupRequestedPipeFd, clearBuf, ClearBufferSize)); } /// <summary>Requests that libcurl unpause the connection associated with this request.</summary> internal void RequestUnpause(EasyRequest easy) { EventSourceTrace(null, easy: easy); Queue(new IncomingRequest { Easy = easy, Type = IncomingRequestType.Unpause }); } /// <summary>Creates and configures a new multi handle.</summary> private SafeCurlMultiHandle CreateAndConfigureMultiHandle() { // Create the new handle SafeCurlMultiHandle multiHandle = Interop.Http.MultiCreate(); if (multiHandle.IsInvalid) { throw CreateHttpRequestException(); } // In support of HTTP/2, enable HTTP/2 connections to be multiplexed if possible. // We must only do this if the version of libcurl being used supports HTTP/2 multiplexing. // Due to a change in a libcurl signature, if we try to make this call on an older libcurl, // we'll end up accidentally and unconditionally enabling HTTP 1.1 pipelining. if (s_supportsHttp2Multiplexing) { ThrowIfCURLMError(Interop.Http.MultiSetOptionLong(multiHandle, Interop.Http.CURLMoption.CURLMOPT_PIPELINING, (long)Interop.Http.CurlPipe.CURLPIPE_MULTIPLEX)); } return multiHandle; } private void WorkerLoop() { Debug.Assert(!Monitor.IsEntered(_incomingRequests), "No locks should be held while invoking Process"); Debug.Assert(_runningWorker != null && _runningWorker.Id == Task.CurrentId, "This is the worker, so it must be running"); Debug.Assert(_wakeupRequestedPipeFd != null && !_wakeupRequestedPipeFd.IsInvalid, "Should have a valid pipe for wake ups"); // Create the multi handle to use for this round of processing. This one handle will be used // to service all easy requests currently available and all those that come in while // we're processing other requests. Once the work quiesces and there are no more requests // to process, this multi handle will be released as the worker goes away. The next // time a request arrives and a new worker is spun up, a new multi handle will be created. SafeCurlMultiHandle multiHandle = CreateAndConfigureMultiHandle(); // Clear our active operations table. This should already be clear, either because // all previous operations completed without unexpected exception, or in the case of an // unexpected exception we should have cleaned up gracefully anyway. But just in case... Debug.Assert(_activeOperations.Count == 0, "We shouldn't have any active operations when starting processing."); _activeOperations.Clear(); bool endingSuccessfully = false; try { // Continue processing as long as there are any active operations while (true) { // First handle any requests in the incoming requests queue. 
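// Added note: the queue is drained one request at a time so that the _incomingRequests lock is released before HandleIncomingRequest runs; callers of Queue() therefore never block behind libcurl work being done on this worker.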
while (true) { IncomingRequest request; lock (_incomingRequests) { if (_incomingRequests.Count == 0) break; request = _incomingRequests.Dequeue(); } HandleIncomingRequest(multiHandle, request); } // If we have no active operations, we're done. if (_activeOperations.Count == 0) { endingSuccessfully = true; return; } // We have one or more active operations. Run any work that needs to be run. ThrowIfCURLMError(Interop.Http.MultiPerform(multiHandle)); // Complete and remove any requests that have finished being processed. CURLMSG message; IntPtr easyHandle; CURLcode result; while (Interop.Http.MultiInfoRead(multiHandle, out message, out easyHandle, out result)) { Debug.Assert(message == CURLMSG.CURLMSG_DONE, "CURLMSG_DONE is supposed to be the only message type"); if (message == CURLMSG.CURLMSG_DONE) { IntPtr gcHandlePtr; CURLcode getInfoResult = Interop.Http.EasyGetInfoPointer(easyHandle, CURLINFO.CURLINFO_PRIVATE, out gcHandlePtr); Debug.Assert(getInfoResult == CURLcode.CURLE_OK, "Failed to get info on a completing easy handle"); if (getInfoResult == CURLcode.CURLE_OK) { ActiveRequest completedOperation; bool gotActiveOp = _activeOperations.TryGetValue(gcHandlePtr, out completedOperation); Debug.Assert(gotActiveOp, "Expected to find GCHandle ptr in active operations table"); if (gotActiveOp) { DeactivateActiveRequest(multiHandle, completedOperation.Easy, gcHandlePtr, completedOperation.CancellationRegistration); FinishRequest(completedOperation.Easy, result); } } } } // Wait for more things to do. bool isWakeupRequestedPipeActive; bool isTimeout; ThrowIfCURLMError(Interop.Http.MultiWait(multiHandle, _wakeupRequestedPipeFd, out isWakeupRequestedPipeActive, out isTimeout)); if (isWakeupRequestedPipeActive) { // We woke up (at least in part) because a wake-up was requested. // Read the data out of the pipe to clear it. Debug.Assert(!isTimeout, "should not have timed out if isExtraFileDescriptorActive"); EventSourceTrace("Wait wake-up"); ReadFromWakeupPipeWhenKnownToContainData(); } if (isTimeout) { EventSourceTrace("Wait timeout"); } // PERF NOTE: curl_multi_wait uses poll (assuming it's available), which is O(N) in terms of the number of fds // being waited on. If this ends up being a scalability bottleneck, we can look into using the curl_multi_socket_* // APIs, which would let us switch to using epoll by being notified when sockets file descriptors are added or // removed and configuring the epoll context with EPOLL_CTL_ADD/DEL, which at the expense of a lot of additional // complexity would let us turn the O(N) operation into an O(1) operation. The additional complexity would come // not only in the form of additional callbacks and managing the socket collection, but also in the form of timer // management, which is necessary when using the curl_multi_socket_* APIs and which we avoid by using just // curl_multi_wait/perform. } } finally { // If we got an unexpected exception, something very bad happened. We may have some // operations that we initiated but that weren't completed. Make sure to clean up any // such operations, failing them and releasing their resources. 
if (_activeOperations.Count > 0) { Debug.Assert(!endingSuccessfully, "We should only have remaining operations if we got an unexpected exception"); foreach (KeyValuePair<IntPtr, ActiveRequest> pair in _activeOperations) { ActiveRequest failingOperation = pair.Value; IntPtr failingOperationGcHandle = pair.Key; DeactivateActiveRequest(multiHandle, failingOperation.Easy, failingOperationGcHandle, failingOperation.CancellationRegistration); // Complete the operation's task and clean up any of its resources failingOperation.Easy.FailRequest(CreateHttpRequestException()); failingOperation.Easy.Cleanup(); // no active processing remains, so cleanup } // Clear the table. _activeOperations.Clear(); } // Finally, dispose of the multi handle. multiHandle.Dispose(); } } private void HandleIncomingRequest(SafeCurlMultiHandle multiHandle, IncomingRequest request) { Debug.Assert(!Monitor.IsEntered(_incomingRequests), "Incoming requests lock should only be held while accessing the queue"); EventSourceTrace("Type: {0}", request.Type, easy: request.Easy); EasyRequest easy = request.Easy; switch (request.Type) { case IncomingRequestType.New: ActivateNewRequest(multiHandle, easy); break; case IncomingRequestType.Cancel: Debug.Assert(easy._associatedMultiAgent == this, "Should only cancel associated easy requests"); Debug.Assert(easy._cancellationToken.IsCancellationRequested, "Cancellation should have been requested"); FindAndFailActiveRequest(multiHandle, easy, new OperationCanceledException(easy._cancellationToken)); break; case IncomingRequestType.Unpause: Debug.Assert(easy._associatedMultiAgent == this, "Should only unpause associated easy requests"); if (!easy._easyHandle.IsClosed) { IntPtr gcHandlePtr; ActiveRequest ar; Debug.Assert(FindActiveRequest(easy, out gcHandlePtr, out ar), "Couldn't find active request for unpause"); CURLcode unpauseResult = Interop.Http.EasyUnpause(easy._easyHandle); try { ThrowIfCURLEError(unpauseResult); } catch (Exception exc) { FindAndFailActiveRequest(multiHandle, easy, exc); } } break; default: Debug.Fail("Invalid request type: " + request.Type); break; } } private void ActivateNewRequest(SafeCurlMultiHandle multiHandle, EasyRequest easy) { Debug.Assert(easy != null, "We should never get a null request"); Debug.Assert(easy._associatedMultiAgent == null, "New requests should not be associated with an agent yet"); // If cancellation has been requested, complete the request proactively if (easy._cancellationToken.IsCancellationRequested) { easy.FailRequest(new OperationCanceledException(easy._cancellationToken)); easy.Cleanup(); // no active processing remains, so cleanup return; } // Otherwise, configure it. Most of the configuration was already done when the EasyRequest // was created, but there's additional configuration we need to do specific to this // multi agent, specifically telling the easy request about its own GCHandle and setting // up callbacks for data processing. Once it's configured, add it to the multi handle. 
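// Added note: the GCHandle allocated below is what keeps the EasyRequest alive while libcurl only holds a raw pointer to it (stored via CURLOPT_PRIVATE and handed back to the static callbacks). It is freed either in the catch block just below if configuration fails, or in DeactivateActiveRequest once the transfer is finished or cancelled.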
GCHandle gcHandle = GCHandle.Alloc(easy); IntPtr gcHandlePtr = GCHandle.ToIntPtr(gcHandle); try { easy._associatedMultiAgent = this; easy.SetCurlOption(CURLoption.CURLOPT_PRIVATE, gcHandlePtr); easy.SetCurlCallbacks(gcHandlePtr, s_receiveHeadersCallback, s_sendCallback, s_seekCallback, s_receiveBodyCallback, s_debugCallback); ThrowIfCURLMError(Interop.Http.MultiAddHandle(multiHandle, easy._easyHandle)); } catch (Exception exc) { gcHandle.Free(); easy.FailRequest(exc); easy.Cleanup(); // no active processing remains, so cleanup return; } // And if cancellation can be requested, hook up a cancellation callback. // This callback will put the easy request back into the queue, which will // ensure that a wake-up request has been issued. When we pull // the easy request out of the request queue, we'll see that it's already // associated with this agent, meaning that it's a cancellation request, // and we'll deal with it appropriately. var cancellationReg = default(CancellationTokenRegistration); if (easy._cancellationToken.CanBeCanceled) { cancellationReg = easy._cancellationToken.Register(s => { var state = (Tuple<MultiAgent, EasyRequest>)s; state.Item1.Queue(new IncomingRequest { Easy = state.Item2, Type = IncomingRequestType.Cancel }); }, Tuple.Create<MultiAgent, EasyRequest>(this, easy)); } // Finally, add it to our map. _activeOperations.Add( gcHandlePtr, new ActiveRequest { Easy = easy, CancellationRegistration = cancellationReg }); } private void DeactivateActiveRequest( SafeCurlMultiHandle multiHandle, EasyRequest easy, IntPtr gcHandlePtr, CancellationTokenRegistration cancellationRegistration) { // Remove the operation from the multi handle so we can shut down the multi handle cleanly CURLMcode removeResult = Interop.Http.MultiRemoveHandle(multiHandle, easy._easyHandle); Debug.Assert(removeResult == CURLMcode.CURLM_OK, "Failed to remove easy handle"); // ignore cleanup errors in release // Release the associated GCHandle so that it's not kept alive forever if (gcHandlePtr != IntPtr.Zero) { try { GCHandle.FromIntPtr(gcHandlePtr).Free(); _activeOperations.Remove(gcHandlePtr); } catch (InvalidOperationException) { Debug.Fail("Couldn't get/free the GCHandle for an active operation while shutting down due to failure"); } } // Undo cancellation registration cancellationRegistration.Dispose(); } private bool FindActiveRequest(EasyRequest easy, out IntPtr gcHandlePtr, out ActiveRequest activeRequest) { // We maintain an IntPtr=>ActiveRequest mapping, which makes it cheap to look-up by GCHandle ptr but // expensive to look up by EasyRequest. If we find this becoming a bottleneck, we can add a reverse // map that stores the other direction as well. 
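// Added note: this linear scan only runs on the cancellation and unpause paths, which are rare compared with the per-transfer callbacks, so the O(n) lookup noted above has not warranted a reverse map so far.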
foreach (KeyValuePair<IntPtr, ActiveRequest> pair in _activeOperations) { if (pair.Value.Easy == easy) { gcHandlePtr = pair.Key; activeRequest = pair.Value; return true; } } gcHandlePtr = IntPtr.Zero; activeRequest = default(ActiveRequest); return false; } private void FindAndFailActiveRequest(SafeCurlMultiHandle multiHandle, EasyRequest easy, Exception error) { EventSourceTrace("Error: {0}", error, easy: easy); IntPtr gcHandlePtr; ActiveRequest activeRequest; if (FindActiveRequest(easy, out gcHandlePtr, out activeRequest)) { DeactivateActiveRequest(multiHandle, easy, gcHandlePtr, activeRequest.CancellationRegistration); easy.FailRequest(error); easy.Cleanup(); // no active processing remains, so we can cleanup } else { Debug.Assert(easy.Task.IsCompleted, "We should only not be able to find the request if it failed or we started to send back the response."); } } private void FinishRequest(EasyRequest completedOperation, CURLcode messageResult) { EventSourceTrace("Curl result: {0}", messageResult, easy: completedOperation); if (completedOperation._responseMessage.StatusCode != HttpStatusCode.Unauthorized) { // If preauthentication is enabled, then we want to transfer credentials to the handler's credential cache. // That entails asking the easy operation which auth types are supported, and then giving that info to the // handler, which along with the request URI and its server credentials will populate the cache appropriately. if (completedOperation._handler.PreAuthenticate) { long authAvailable; if (Interop.Http.EasyGetInfoLong(completedOperation._easyHandle, CURLINFO.CURLINFO_HTTPAUTH_AVAIL, out authAvailable) == CURLcode.CURLE_OK) { completedOperation._handler.TransferCredentialsToCache( completedOperation._requestMessage.RequestUri, (CURLAUTH)authAvailable); } // Ignore errors: no need to fail for the sake of putting the credentials into the cache } } // Complete or fail the request try { bool unsupportedProtocolRedirect = messageResult == CURLcode.CURLE_UNSUPPORTED_PROTOCOL && completedOperation._isRedirect; if (!unsupportedProtocolRedirect) { ThrowIfCURLEError(messageResult); } completedOperation.EnsureResponseMessagePublished(); } catch (Exception exc) { completedOperation.FailRequest(exc); } // At this point, we've completed processing the entire request, either due to error // or due to completing the entire response. completedOperation.Cleanup(); } private static void CurlDebugFunction(IntPtr curl, Interop.Http.CurlInfoType type, IntPtr data, ulong size, IntPtr context) { EasyRequest easy; TryGetEasyRequestFromContext(context, out easy); try { switch (type) { case Interop.Http.CurlInfoType.CURLINFO_TEXT: case Interop.Http.CurlInfoType.CURLINFO_HEADER_IN: case Interop.Http.CurlInfoType.CURLINFO_HEADER_OUT: string text = Marshal.PtrToStringAnsi(data, (int)size).Trim(); if (text.Length > 0) { CurlHandler.EventSourceTrace("{0}: {1}", type, text, 0, easy: easy); } break; default: CurlHandler.EventSourceTrace("{0}: {1} bytes", type, size, 0, easy: easy); break; } } catch (Exception exc) { CurlHandler.EventSourceTrace("Error: {0}", exc, easy: easy); } } private static ulong CurlReceiveHeadersCallback(IntPtr buffer, ulong size, ulong nitems, IntPtr context) { size *= nitems; if (size == 0) { return 0; } EasyRequest easy; if (TryGetEasyRequestFromContext(context, out easy)) { CurlHandler.EventSourceTrace("Size: {0}", size, easy: easy); try { // The callback is invoked once per header; multi-line headers get merged into a single line. 
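// Added note: libcurl hands this callback one header line at a time (including the status line); the code below tries the status line first and otherwise splits the line into a name/value pair. Returning anything other than the full size from this callback (see the end of the method) tells libcurl to fail the transfer.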
string responseHeader = Marshal.PtrToStringAnsi(buffer).Trim(); HttpResponseMessage response = easy._responseMessage; if (!TryParseStatusLine(response, responseHeader, easy)) { int index = 0; string headerName = CurlResponseParseUtils.ReadHeaderName(responseHeader, out index); if (headerName != null) { string headerValue = responseHeader.Substring(index).Trim(); if (!response.Headers.TryAddWithoutValidation(headerName, headerValue)) { response.Content.Headers.TryAddWithoutValidation(headerName, headerValue); } else if (easy._isRedirect && string.Equals(headerName, HttpKnownHeaderNames.Location, StringComparison.OrdinalIgnoreCase)) { HandleRedirectLocationHeader(easy, headerValue); } else if (string.Equals(headerName, HttpKnownHeaderNames.SetCookie, StringComparison.OrdinalIgnoreCase)) { easy._handler.AddResponseCookies(easy, headerValue); } } } return size; } catch (Exception ex) { easy.FailRequest(ex); // cleanup will be handled by main processing loop } } // Returing a value other than size fails the callback and forces // request completion with an error CurlHandler.EventSourceTrace("Aborting request", easy: easy); return size - 1; } private static ulong CurlReceiveBodyCallback( IntPtr buffer, ulong size, ulong nitems, IntPtr context) { size *= nitems; EasyRequest easy; if (TryGetEasyRequestFromContext(context, out easy)) { CurlHandler.EventSourceTrace("Size: {0}", size, easy: easy); try { if (!(easy.Task.IsCanceled || easy.Task.IsFaulted)) { // Complete the task if it hasn't already been. This will make the // stream available to consumers. A previous write callback // may have already completed the task to publish the response. easy.EnsureResponseMessagePublished(); // Try to transfer the data to a reader. This will return either the // amount of data transferred (equal to the amount requested // to be transferred), or it will return a pause request. return easy._responseMessage.ResponseStream.TransferDataToStream(buffer, (long)size); } } catch (Exception ex) { easy.FailRequest(ex); // cleanup will be handled by main processing loop } } // Returing a value other than size fails the callback and forces // request completion with an error. CurlHandler.EventSourceTrace("Aborting request", easy: easy); return (size > 0) ? size - 1 : 1; } private static ulong CurlSendCallback(IntPtr buffer, ulong size, ulong nitems, IntPtr context) { int length = checked((int)(size * nitems)); Debug.Assert(length <= RequestBufferSize, "length " + length + " should not be larger than RequestBufferSize " + RequestBufferSize); if (length == 0) { return 0; } EasyRequest easy; if (TryGetEasyRequestFromContext(context, out easy)) { CurlHandler.EventSourceTrace("Size: {0}", length, easy: easy); Debug.Assert(easy._requestContentStream != null, "We should only be in the send callback if we have a request content stream"); Debug.Assert(easy._associatedMultiAgent != null, "The request should be associated with a multi agent."); try { // Transfer data from the request's content stream to libcurl return TransferDataFromRequestStream(buffer, length, easy); } catch (Exception ex) { easy.FailRequest(ex); // cleanup will be handled by main processing loop } } // Something went wrong. CurlHandler.EventSourceTrace("Aborting request", easy: easy); return Interop.Http.CURL_READFUNC_ABORT; } /// <summary> /// Transfers up to <paramref name="length"/> data from the <paramref name="easy"/>'s /// request content (non-memory) stream to the buffer. 
/// </summary> /// <returns>The number of bytes transferred.</returns> private static ulong TransferDataFromRequestStream(IntPtr buffer, int length, EasyRequest easy) { CurlHandler.EventSourceTrace("Length: {0}", length, easy: easy); MultiAgent multi = easy._associatedMultiAgent; // First check to see whether there's any data available from a previous asynchronous read request. // If there is, the transfer state's Task field will be non-null, with its Result representing // the number of bytes read. The Buffer will then contain all of that read data. If the Count // is 0, then this is the first time we're checking that Task, and so we populate the Count // from that read result. After that, we can transfer as much data remains between Offset and // Count. Multiple callbacks may pull from that one read. EasyRequest.SendTransferState sts = easy._sendTransferState; if (sts != null) { // Is there a previous read that may still have data to be consumed? if (sts._task != null) { if (!sts._task.IsCompleted) { // We have a previous read that's not yet completed. This should be quite rare, but it can // happen when we're unpaused prematurely, potentially due to the request still finishing // being sent as the server starts to send a response. Since we still have the outstanding // read, we simply re-pause. When the task completes (which could have happened immediately // after the check). the continuation we previously created will fire and queue an unpause. // Since all of this processing is single-threaded on the current thread, that unpause request // is guaranteed to happen after this re-pause. multi.EventSourceTrace("Re-pausing reading after a spurious un-pause", easy: easy); return Interop.Http.CURL_READFUNC_PAUSE; } // Determine how many bytes were read on the last asynchronous read. // If nothing was read, then we're done and can simply return 0 to indicate // the end of the stream. int bytesRead = sts._task.GetAwaiter().GetResult(); // will throw if read failed Debug.Assert(bytesRead >= 0 && bytesRead <= sts._buffer.Length, "ReadAsync returned an invalid result length: " + bytesRead); if (bytesRead == 0) { sts.SetTaskOffsetCount(null, 0, 0); return 0; } // If Count is still 0, then this is the first time after the task completed // that we're examining the data: transfer the bytesRead to the Count. if (sts._count == 0) { multi.EventSourceTrace("ReadAsync completed with bytes: {0}", bytesRead, easy: easy); sts._count = bytesRead; } // Now Offset and Count are both accurate. Determine how much data we can copy to libcurl... int availableData = sts._count - sts._offset; Debug.Assert(availableData > 0, "There must be some data still available."); // ... and copy as much of that as libcurl will allow. int bytesToCopy = Math.Min(availableData, length); Marshal.Copy(sts._buffer, sts._offset, buffer, bytesToCopy); multi.EventSourceTrace("Copied {0} bytes from request stream", bytesToCopy, easy: easy); // Update the offset. If we've gone through all of the data, reset the state // so that the next time we're called back we'll do a new read. sts._offset += bytesToCopy; Debug.Assert(sts._offset <= sts._count, "Offset should never exceed count"); if (sts._offset == sts._count) { sts.SetTaskOffsetCount(null, 0, 0); } // Return the amount of data copied Debug.Assert(bytesToCopy > 0, "We should never return 0 bytes here."); return (ulong)bytesToCopy; } // sts was non-null but sts.Task was null, meaning there was no previous task/data // from which to satisfy any of this request. 
} else // sts == null { // Allocate a transfer state object to use for the remainder of this request. easy._sendTransferState = sts = new EasyRequest.SendTransferState(); } Debug.Assert(sts != null, "By this point we should have a transfer object"); Debug.Assert(sts._task == null, "There shouldn't be a task now."); Debug.Assert(sts._count == 0, "Count should be zero."); Debug.Assert(sts._offset == 0, "Offset should be zero."); // If we get here, there was no previously read data available to copy. // Initiate a new asynchronous read. Task<int> asyncRead = easy._requestContentStream.ReadAsyncInternal( sts._buffer, 0, Math.Min(sts._buffer.Length, length), easy._cancellationToken); Debug.Assert(asyncRead != null, "Badly implemented stream returned a null task from ReadAsync"); // Even though it's "Async", it's possible this read could complete synchronously or extremely quickly. // Check to see if it did, in which case we can also satisfy the libcurl request synchronously in this callback. if (asyncRead.IsCompleted) { // Get the amount of data read. int bytesRead = asyncRead.GetAwaiter().GetResult(); // will throw if read failed if (bytesRead == 0) { return 0; } // Copy as much as we can. int bytesToCopy = Math.Min(bytesRead, length); Debug.Assert(bytesToCopy > 0 && bytesToCopy <= sts._buffer.Length, "ReadAsync quickly returned an invalid result length: " + bytesToCopy); Marshal.Copy(sts._buffer, 0, buffer, bytesToCopy); // If we read more than we were able to copy, stash it away for the next read. if (bytesToCopy < bytesRead) { sts.SetTaskOffsetCount(asyncRead, bytesToCopy, bytesRead); } // Return the number of bytes read. return (ulong)bytesToCopy; } // Otherwise, the read completed asynchronously. Store the task, and hook up a continuation // such that the connection will be unpaused once the task completes. sts.SetTaskOffsetCount(asyncRead, 0, 0); asyncRead.ContinueWith((t, s) => { EasyRequest easyRef = (EasyRequest)s; easyRef._associatedMultiAgent.RequestUnpause(easyRef); }, easy, CancellationToken.None, TaskContinuationOptions.ExecuteSynchronously, TaskScheduler.Default); // Then pause the connection. multi.EventSourceTrace("Pausing the connection.", easy: easy); return Interop.Http.CURL_READFUNC_PAUSE; } private static CurlSeekResult CurlSeekCallback(IntPtr context, long offset, int origin) { CurlHandler.EventSourceTrace("Offset: {0}, Origin: {1}", offset, origin, 0); EasyRequest easy; if (TryGetEasyRequestFromContext(context, out easy)) { try { // If libcul is requesting we seek back to the beginning and if the request // content stream is in a position to reset itself, reset and let libcurl // know we did the seek; otherwise, let it know we can't seek. 
if (offset == 0 && origin == (int)SeekOrigin.Begin && easy._requestContentStream != null && easy._requestContentStream.TryReset()) { // Dump any state associated with the old stream's position if (easy._sendTransferState != null) { easy._sendTransferState.SetTaskOffsetCount(null, 0, 0); } // Restart the transfer easy._requestContentStream.Run(); return CurlSeekResult.CURL_SEEKFUNC_OK; } else { return CurlSeekResult.CURL_SEEKFUNC_CANTSEEK; } } catch (Exception ex) { easy.FailRequest(ex); // cleanup will be handled by main processing loop } } // Something went wrong return CurlSeekResult.CURL_SEEKFUNC_FAIL; } private static bool TryGetEasyRequestFromContext(IntPtr context, out EasyRequest easy) { // Get the EasyRequest from the context try { GCHandle handle = GCHandle.FromIntPtr(context); easy = (EasyRequest)handle.Target; Debug.Assert(easy != null, "Expected non-null EasyRequest in GCHandle"); return easy != null; } catch (InvalidCastException) { Debug.Fail("EasyRequest wasn't the GCHandle's Target"); } catch (InvalidOperationException) { Debug.Fail("Invalid GCHandle"); } easy = null; return false; } private void EventSourceTrace<TArg0>(string formatMessage, TArg0 arg0, EasyRequest easy = null, [CallerMemberName] string memberName = null) { CurlHandler.EventSourceTrace(formatMessage, arg0, this, easy, memberName); } private void EventSourceTrace(string message, EasyRequest easy = null, [CallerMemberName] string memberName = null) { CurlHandler.EventSourceTrace(message, this, easy, memberName); } /// <summary>Represents an active request currently being processed by the agent.</summary> private struct ActiveRequest { public EasyRequest Easy; public CancellationTokenRegistration CancellationRegistration; } /// <summary>Represents an incoming request to be processed by the agent.</summary> internal struct IncomingRequest { public IncomingRequestType Type; public EasyRequest Easy; } /// <summary>The type of an incoming request to be processed by the agent.</summary> internal enum IncomingRequestType : byte { /// <summary>A new request that's never been submitted to an agent.</summary> New, /// <summary>A request to cancel a request previously submitted to the agent.</summary> Cancel, /// <summary>A request to unpause the connection associated with a request previously submitted to the agent.</summary> Unpause } } } }
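// --------------------------------------------------------------------------------------------
// A minimal, self-contained sketch (not part of the handler code above) illustrating the
// GCHandle round-trip the multi agent relies on: a managed request object is kept alive behind
// a GCHandle, the handle's IntPtr is handed to native code as opaque "private" context
// (CURLOPT_PRIVATE above), and each native callback recovers the managed object from that
// IntPtr (as in TryGetEasyRequestFromContext). The RequestState class and Main method below
// are hypothetical stand-ins for EasyRequest and the agent loop.
// --------------------------------------------------------------------------------------------
using System;
using System.Runtime.InteropServices;

internal static class GCHandleContextSketch
{
    private sealed class RequestState   // stand-in for EasyRequest
    {
        public string Url;
    }

    private static void Main()
    {
        var state = new RequestState { Url = "http://example.com/" };

        // Allocate a handle; the IntPtr form is what native code would store as its context.
        GCHandle handle = GCHandle.Alloc(state);
        IntPtr context = GCHandle.ToIntPtr(handle);
        try
        {
            // Simulates a native callback receiving the opaque context pointer.
            OnNativeCallback(context);
        }
        finally
        {
            // The real code frees the handle when the operation is deactivated;
            // failing to do so would keep the request alive indefinitely.
            handle.Free();
        }
    }

    private static void OnNativeCallback(IntPtr context)
    {
        // Mirror of TryGetEasyRequestFromContext: recover the managed object from the IntPtr.
        var state = (RequestState)GCHandle.FromIntPtr(context).Target;
        Console.WriteLine("Callback for " + state.Url);
    }
}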
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.Azure.AcceptanceTestsAzureSpecials { using System; using System.Linq; using System.Collections.Generic; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Rest; using Microsoft.Rest.Serialization; using Newtonsoft.Json; using Microsoft.Rest.Azure; using Models; /// <summary> /// XMsClientRequestIdOperations operations. /// </summary> internal partial class XMsClientRequestIdOperations : IServiceOperations<AutoRestAzureSpecialParametersTestClient>, IXMsClientRequestIdOperations { /// <summary> /// Initializes a new instance of the XMsClientRequestIdOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> internal XMsClientRequestIdOperations(AutoRestAzureSpecialParametersTestClient client) { if (client == null) { throw new ArgumentNullException("client"); } this.Client = client; } /// <summary> /// Gets a reference to the AutoRestAzureSpecialParametersTestClient /// </summary> public AutoRestAzureSpecialParametersTestClient Client { get; private set; } /// <summary> /// Get method that overwrites x-ms-client-request header with value /// 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0. /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public async Task<AzureOperationResponse> GetWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/overwrite/x-ms-client-request-id/method/").ToString(); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Get method that overwrites x-ms-client-request header with value /// 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0. /// </summary> /// <param name='xMsClientRequestId'> /// This should appear as a method parameter, use value /// '9C4D50EE-2D56-4CD3-8152-34347DC9F2B0' /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public async Task<AzureOperationResponse> ParamGetWithHttpMessagesAsync(string xMsClientRequestId, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (xMsClientRequestId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "xMsClientRequestId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("xMsClientRequestId", xMsClientRequestId); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ParamGet", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/overwrite/x-ms-client-request-id/via-param/method/").ToString(); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += "?" + string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (xMsClientRequestId != null) { if (_httpRequest.Headers.Contains("x-ms-client-request-id")) { _httpRequest.Headers.Remove("x-ms-client-request-id"); } _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", xMsClientRequestId); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (JsonException) { // Ignore the 
exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
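// --------------------------------------------------------------------------------------------
// A minimal sketch (not generated code) of the header-handling pattern used by the generated
// operations above: a per-call x-ms-client-request-id GUID is attached, and caller-supplied
// headers are applied last-writer-wins by removing any existing value before re-adding it with
// TryAddWithoutValidation. The header names match the code above; the URL and values are
// illustrative only.
// --------------------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Net.Http;

internal static class HeaderOverrideSketch
{
    private static void Main()
    {
        using (var request = new HttpRequestMessage(HttpMethod.Get, "http://localhost/azurespecials/"))
        {
            // Per-request correlation id, mirroring the GenerateClientRequestId path above.
            request.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());

            var customHeaders = new Dictionary<string, List<string>>
            {
                ["accept-language"] = new List<string> { "en-US" }
            };

            // Remove-then-add so caller-supplied headers replace, rather than append to,
            // anything the client has already set.
            foreach (var header in customHeaders)
            {
                if (request.Headers.Contains(header.Key))
                {
                    request.Headers.Remove(header.Key);
                }
                request.Headers.TryAddWithoutValidation(header.Key, header.Value);
            }

            Console.WriteLine(request.Headers);
        }
    }
}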
using System; using System.IO; using System.Net; using ServiceStack.Common.Web; using ServiceStack.Service; using ServiceStack.ServiceClient.Web; using ServiceStack.ServiceHost; using ServiceStack.Text; using ServiceStack.WebHost.Endpoints.Support.Mocks; using ServiceStack.WebHost.Endpoints.Tests.Mocks; namespace ServiceStack.WebHost.Endpoints.Tests.Support { public class DirectServiceClient : IServiceClient, IRestClient { ServiceManager ServiceManager { get; set; } readonly HttpRequestMock httpReq = new HttpRequestMock(); readonly HttpResponseMock httpRes = new HttpResponseMock(); public DirectServiceClient(ServiceManager serviceManager) { this.ServiceManager = serviceManager; } public void SendOneWay(object request) { ServiceManager.Execute(request); } public void SendOneWay(string relativeOrAbsoluteUrl, object request) { ServiceManager.Execute(request); } private bool ApplyRequestFilters<TResponse>(object request) { if (EndpointHost.ApplyRequestFilters(httpReq, httpRes, request)) { ThrowIfError<TResponse>(httpRes); return true; } return false; } private void ThrowIfError<TResponse>(HttpResponseMock httpRes) { if (httpRes.StatusCode >= 400) { var webEx = new WebServiceException("WebServiceException, StatusCode: " + httpRes.StatusCode) { StatusCode = httpRes.StatusCode, StatusDescription = httpRes.StatusDescription, }; try { var deserializer = EndpointHost.AppHost.ContentTypeFilters.GetStreamDeserializer(httpReq.ResponseContentType); webEx.ResponseDto = deserializer(typeof(TResponse), httpRes.OutputStream); } catch (Exception ex) { Console.WriteLine(ex); } throw webEx; } } private bool ApplyResponseFilters<TResponse>(object response) { if (EndpointHost.ApplyResponseFilters(httpReq, httpRes, response)) { ThrowIfError<TResponse>(httpRes); return true; } return false; } public TResponse Send<TResponse>(object request) { httpReq.HttpMethod = HttpMethods.Post; if (ApplyRequestFilters<TResponse>(request)) return default(TResponse); var response = ServiceManager.ServiceController.Execute(request, new HttpRequestContext(httpReq, httpRes, request, EndpointAttributes.HttpPost)); if (ApplyResponseFilters<TResponse>(response)) return (TResponse)response; return (TResponse)response; } public TResponse Send<TResponse>(IReturn<TResponse> request) { throw new NotImplementedException(); } public void Send(IReturnVoid request) { throw new NotImplementedException(); } public TResponse Get<TResponse>(IReturn<TResponse> request) { throw new NotImplementedException(); } public void Get(IReturnVoid request) { throw new NotImplementedException(); } public TResponse Get<TResponse>(string relativeOrAbsoluteUrl) { httpReq.HttpMethod = HttpMethods.Get; var requestTypeName = typeof(TResponse).Namespace + "." 
+ relativeOrAbsoluteUrl; var requestType = typeof (TResponse).Assembly.GetType(requestTypeName); if (requestType == null) throw new ArgumentException("Type not found: " + requestTypeName); var request = requestType.CreateInstance(); if (ApplyRequestFilters<TResponse>(request)) return default(TResponse); var response = ServiceManager.ServiceController.Execute(request, new HttpRequestContext(httpReq, httpRes, request, EndpointAttributes.HttpGet)); if (ApplyResponseFilters<TResponse>(response)) return (TResponse)response; return (TResponse)response; } public TResponse Delete<TResponse>(IReturn<TResponse> request) { throw new NotImplementedException(); } public void Delete(IReturnVoid request) { throw new NotImplementedException(); } public TResponse Delete<TResponse>(string relativeOrAbsoluteUrl) { throw new NotImplementedException(); } public TResponse Post<TResponse>(IReturn<TResponse> request) { throw new NotImplementedException(); } public TResponse Post<TResponse>(object request) { throw new NotImplementedException(); } public void Post(IReturnVoid request) { throw new NotImplementedException(); } public TResponse Post<TResponse>(string relativeOrAbsoluteUrl, object request) { throw new NotImplementedException(); } public TResponse Put<TResponse>(IReturn<TResponse> request) { throw new NotImplementedException(); } public void Put(IReturnVoid request) { throw new NotImplementedException(); } public TResponse Put<TResponse>(string relativeOrAbsoluteUrl, object request) { throw new NotImplementedException(); } public TResponse Patch<TResponse>(IReturn<TResponse> request) { throw new NotImplementedException(); } public void Patch(IReturnVoid request) { throw new NotImplementedException(); } public TResponse Patch<TResponse>(string relativeOrAbsoluteUrl, object request) { throw new NotImplementedException(); } public TResponse PostFile<TResponse>(string relativeOrAbsoluteUrl, FileInfo fileToUpload, string mimeType) { throw new NotImplementedException(); } public void CustomMethod(string httpVerb, IReturnVoid request) { throw new NotImplementedException(); } public TResponse CustomMethod<TResponse>(string httpVerb, IReturn<TResponse> request) { throw new NotImplementedException(); } public HttpWebResponse Head(IReturn request) { throw new NotImplementedException(); } public HttpWebResponse Head(string relativeOrAbsoluteUrl) { throw new NotImplementedException(); } public TResponse PostFile<TResponse>(string relativeOrAbsoluteUrl, Stream fileToUpload, string fileInfo, string mimeType) { throw new NotImplementedException(); } public void SendAsync<TResponse>(object request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { var response = default(TResponse); try { try { if (ApplyRequestFilters<TResponse>(request)) { onSuccess(default(TResponse)); return; } } catch (Exception ex) { onError(default(TResponse), ex); return; } response = this.Send<TResponse>(request); try { if (ApplyResponseFilters<TResponse>(request)) { onSuccess(response); return; } } catch (Exception ex) { onError(response, ex); return; } onSuccess(response); } catch (Exception ex) { if (onError != null) { onError(response, ex); return; } Console.WriteLine("Error: " + ex.Message); } } public void SetCredentials(string userName, string password) { throw new NotImplementedException(); } public void GetAsync<TResponse>(IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void GetAsync<TResponse>(string relativeOrAbsoluteUrl, 
Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void DeleteAsync<TResponse>(string relativeOrAbsoluteUrl, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void DeleteAsync<TResponse>(IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void PostAsync<TResponse>(IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void PostAsync<TResponse>(object request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void PostAsync<TResponse>(string relativeOrAbsoluteUrl, object request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void PutAsync<TResponse>(IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void PutAsync<TResponse>(string relativeOrAbsoluteUrl, object request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void CustomMethodAsync<TResponse>(string httpVerb, IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError) { throw new NotImplementedException(); } public void CancelAsync() { throw new NotImplementedException(); } public void Dispose() { } public TResponse PostFileWithRequest<TResponse>(string relativeOrAbsoluteUrl, FileInfo fileToUpload, object request) { throw new NotImplementedException(); } public TResponse PostFileWithRequest<TResponse>(string relativeOrAbsoluteUrl, Stream fileToUpload, string fileName, object request) { throw new NotImplementedException(); } } }
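// --------------------------------------------------------------------------------------------
// A minimal sketch (assumptions noted below) of the convention used by Get<TResponse>(string)
// in the mock client above: the "relativeOrAbsoluteUrl" argument is treated as a request DTO
// type name in the same namespace and assembly as TResponse, and an instance is created by
// reflection. The Echo/EchoResponse DTOs are hypothetical stand-ins, and Activator.CreateInstance
// is used here in place of ServiceStack's CreateInstance() extension.
// --------------------------------------------------------------------------------------------
using System;

namespace ReflectionLookupSketch
{
    public class Echo { public string Message { get; set; } }
    public class EchoResponse { public string Result { get; set; } }

    internal static class Program
    {
        private static object CreateRequestFor<TResponse>(string requestTypeShortName)
        {
            // Same lookup as the mock client: namespace of TResponse + "." + name,
            // resolved against TResponse's assembly.
            string fullName = typeof(TResponse).Namespace + "." + requestTypeShortName;
            Type requestType = typeof(TResponse).Assembly.GetType(fullName);
            if (requestType == null)
                throw new ArgumentException("Type not found: " + fullName);

            return Activator.CreateInstance(requestType);
        }

        private static void Main()
        {
            object request = CreateRequestFor<EchoResponse>("Echo");
            Console.WriteLine(request.GetType().FullName);   // ReflectionLookupSketch.Echo
        }
    }
}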
//MIT, 2014-present, WinterDev //---------------------------------------------------------------------------- // Anti-Grain Geometry - Version 2.4 // Copyright (C) 2002-2005 Maxim Shemanarev (http://www.antigrain.com) // // C# Port port by: Lars Brubaker // [email protected] // Copyright (C) 2007 // // Permission to copy, use, modify, sell and distribute this software // is granted provided this copyright notice appears in all copies. // This software is provided "as is" without express or implied // warranty, and with no claim as to its suitability for any purpose. // //---------------------------------------------------------------------------- // Contact: [email protected] // [email protected] // http://www.antigrain.com //---------------------------------------------------------------------------- using System; using PixelFarm.Drawing; using PixelFarm.CpuBlit.Imaging; using PixelFarm.CpuBlit.VertexProcessing; using PixelFarm.CpuBlit.Rasterization; using PixelFarm.CpuBlit.PixelProcessing; // namespace PixelFarm.CpuBlit { public sealed partial class AggRenderSurface { MemBitmap _destBmp; readonly ScanlinePacked8 _sclinePack8; readonly ScanlineRasterizer _sclineRas; MyBitmapBlender _destBitmapBlender; PixelBlenderBGRA _pixelBlenderBGRA; DestBitmapRasterizer _bmpRasterizer; double _ox; //canvas origin x double _oy; //canvas origin y int _destWidth; int _destHeight; internal event EventHandler DstBitmapAttached; internal event EventHandler DstBitmapDetached; public AggRenderSurface() { //1. attach dst bmp before use this //2. you can detach this surface and attach to another bmp surface _pixelBlenderBGRA = new PixelBlenderBGRA(); _destBitmapBlender = new MyBitmapBlender(); _bmpRasterizer = new DestBitmapRasterizer(); _sclinePack8 = new ScanlinePacked8(); _sclineRas = new ScanlineRasterizer(); _currentImgSpanGen = _imgSpanGenBilinearClip; CurrentTransformMatrix = Affine.IdentityMatrix; } public void SetCustomPixelBlender(PixelBlender32 customPixelBlender) { _destBitmapBlender.OutputPixelBlender = (customPixelBlender != null) ? 
customPixelBlender : _pixelBlenderBGRA; } public void SetGamma(PrebuiltGammaTable prebuiltGammaTable) { _sclineRas.SetGammaLut(prebuiltGammaTable); } public void AttachDstBitmap(MemBitmap dstBmp) { _destBmp = dstBmp; _destBitmapBlender.Attach(dstBmp, _pixelBlenderBGRA); _sclineRas.SetClipBox( new Q1Rect(0, 0, _destWidth = dstBmp.Width, //** _destHeight = dstBmp.Height) //** ); DstBitmapAttached?.Invoke(this, EventArgs.Empty); } public void DetachDstBitmap() { _destBmp = null; _destBitmapBlender.Detach(); _destWidth = _destHeight = 0; DstBitmapDetached?.Invoke(this, EventArgs.Empty); } // public int Width => _destWidth; public int Height => _destHeight; public MemBitmap DestBitmap => _destBmp; //low-level Agg infrastructures public BitmapBlenderBase DestBitmapBlender => _destBitmapBlender; public ScanlineRasterizer ScanlineRasterizer => _sclineRas; public ScanlinePacked8 ScanlinePacked8 => _sclinePack8; public DestBitmapRasterizer BitmapRasterizer => _bmpRasterizer; public FillingRule FillingRule { get => _sclineRas.ScanlineFillingRule; set => _sclineRas.ScanlineFillingRule = value; } public float ScanlineRasOriginX => _sclineRas.OffsetOriginX; public float ScanlineRasOriginY => _sclineRas.OffsetOriginY; // // public PixelBlender32 PixelBlender { get => _destBitmapBlender.OutputPixelBlender; set => _destBitmapBlender.OutputPixelBlender = value; } public ICoordTransformer CurrentTransformMatrix { get; set; } // public Q1Rect GetClippingRect() => ScanlineRasterizer.GetVectorClipBox(); public void SetClippingRect(Q1Rect rect) { if (Q1Rect.IntersectRectangles(rect, new Q1Rect(0, 0, this.Width, this.Height), out Q1Rect result)) { ScanlineRasterizer.SetClipBox(result); } } public PixelFarm.CpuBlit.FragmentProcessing.ImgSpanGen CustomImgSpanGen { get => _imgSpanGenCustom; set { _imgSpanGenCustom = value; if (value == null) { //switch to default _currentImgSpanGen = _imgSpanGenBilinearClip; } else { _currentImgSpanGen = value; } } } public ImageInterpolationQuality ImageInterpolationQuality { get; set; } public void Clear(Color color) { BitmapBlenderBase destImage = this.DestBitmapBlender; #if DEBUG if (destImage.BitDepth != 32) throw new NotSupportedException(); #endif MemBitmapExt.Clear(destImage.GetBufferPtr(), color); //unsafe //{ // int* buffer = (int*)tmp.Ptr; // int len32 = tmp.LengthInBytes / 4; // switch (destImage.BitDepth) // { // default: throw new NotSupportedException(); // case 32: // { // //------------------------------ // //fast clear buffer // //skip clipping **** // //TODO: reimplement clipping*** // //------------------------------ // if (color == Color.White) // { // //fast cleat with white color // int n = len32; // unsafe // { // //fixed (void* head = &buffer[0]) // { // uint* head_i32 = (uint*)buffer; // for (int i = n - 1; i >= 0; --i) // { // *head_i32 = 0xffffffff; //white (ARGB) // head_i32++; // } // } // } // } // else if (color == Color.Black) // { // //fast clear with black color // int n = len32; // unsafe // { // //fixed (void* head = &buffer[0]) // { // uint* head_i32 = (uint*)buffer; // for (int i = n - 1; i >= 0; --i) // { // *head_i32 = 0xff000000; //black (ARGB) // head_i32++; // } // } // } // } // else if (color == Color.Empty) // { // int n = len32; // unsafe // { // //fixed (void* head = &buffer[0]) // { // uint* head_i32 = (uint*)buffer; // for (int i = n - 1; i >= 0; --i) // { // *head_i32 = 0x00000000; //empty // head_i32++; // } // } // } // } // else // { // //other color // //#if WIN32 // // uint colorARGB = (uint)((color.alpha << 24) | ((color.red 
<< 16) | (color.green << 8) | color.blue)); // //#else // // uint colorARGB = (uint)((color.alpha << 24) | ((color.blue << 16) | (color.green << 8) | color.red)); // //#endif // //ARGB // uint colorARGB = (uint)((color.alpha << CO.A_SHIFT) | ((color.red << CO.R_SHIFT) | (color.green << CO.G_SHIFT) | color.blue << CO.B_SHIFT)); // int n = len32; // unsafe // { // //fixed (void* head = &buffer[0]) // { // uint* head_i32 = (uint*)buffer; // for (int i = n - 1; i >= 0; --i) // { // *head_i32 = colorARGB; // head_i32++; // } // } // } // } // } // break; // } //} } /// <summary> /// we do NOT store vxs /// </summary> /// <param name="vxs"></param> /// <param name="c"></param> public void Render(VertexStore vxs, Drawing.Color c) { //reset rasterizer before render each vertextSnap //----------------------------- _sclineRas.Reset(); ICoordTransformer transform = this.CurrentTransformMatrix; if (!transform.IsIdentity) { _sclineRas.AddPath(vxs, transform); } else { _sclineRas.AddPath(vxs); } _bmpRasterizer.RenderWithColor(_destBitmapBlender, _sclineRas, _sclinePack8, c); unchecked { _destImageChanged++; }; //----------------------------- } public void SetScanlineRasOrigin(float x, float y) { _sclineRas.OffsetOriginX = x; _sclineRas.OffsetOriginY = y; } public bool UseSubPixelLcdEffect { get { return _sclineRas.ExtendWidthX3ForSubPixelLcdEffect; } set { if (value) { //TODO: review here again _sclineRas.ExtendWidthX3ForSubPixelLcdEffect = true; _bmpRasterizer.ScanlineRenderMode = ScanlineRenderMode.SubPixelLcdEffect; } else { _sclineRas.ExtendWidthX3ForSubPixelLcdEffect = false; _bmpRasterizer.ScanlineRenderMode = ScanlineRenderMode.Default; } } } #if DEBUG VertexStore dbug_v1 = new VertexStore(); VertexStore dbug_v2 = new VertexStore(); Stroke dbugStroke = new Stroke(1); public string dbugName; public void dbugLine(double x1, double y1, double x2, double y2, Drawing.Color color) { dbugStroke.Width = 1; dbug_v1.AddMoveTo(x1, y1); dbug_v1.AddLineTo(x2, y2); //dbug_v1.AddStop(); dbugStroke.MakeVxs(dbug_v1, dbug_v2); Render(dbug_v2, color); dbug_v1.Clear(); dbug_v2.Clear(); } public override string ToString() { return dbugName ?? ""; } #endif } partial class AggRenderSurface { class MyBitmapBlender : BitmapBlenderBase { MemBitmap _bmp; public MyBitmapBlender() { } protected override void OnAttachingDstBitmap(MemBitmap bmp) { _bmp = bmp; } public override void WriteBuffer(int[] newbuffer) { MemBitmap.ReplaceBuffer(_bmp, newbuffer); } } } }
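// --------------------------------------------------------------------------------------------
// A minimal sketch (not PixelFarm API) of the fast-clear idea shown in the commented-out block
// above: pack the color into a single 32-bit value once, then overwrite every pixel slot with
// it. The ARGB layout assumed here (A in the high byte) is an assumption for illustration; the
// real code derives the shifts from its CO.*_SHIFT constants and the attached pixel blender.
// --------------------------------------------------------------------------------------------
using System;

internal static class FastClearSketch
{
    private static uint PackArgb(byte a, byte r, byte g, byte b)
        => ((uint)a << 24) | ((uint)r << 16) | ((uint)g << 8) | b;

    private static void Clear(uint[] pixels, uint packedColor)
    {
        // Equivalent to the pointer loop in the commented-out block, expressed safely.
        for (int i = 0; i < pixels.Length; i++)
        {
            pixels[i] = packedColor;
        }
    }

    private static void Main()
    {
        var pixels = new uint[64 * 64];                    // hypothetical 64x64, 32-bit surface
        Clear(pixels, PackArgb(0xFF, 0xFF, 0xFF, 0xFF));   // white
        Clear(pixels, 0xFF000000);                         // black
        Console.WriteLine(string.Format("First pixel: 0x{0:X8}", pixels[0]));
    }
}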
using System.Collections; using System.Collections.Generic; using UnityEngine; using wvr; using WaveVR_Log; using System; using System.Runtime.InteropServices; using System.IO; using System.Threading; public class WaveVR_RenderModel : MonoBehaviour { private static string LOG_TAG = "WaveVR_RenderModel"; private void PrintDebugLog(string msg) { #if UNITY_EDITOR Debug.Log(LOG_TAG + " Hand: " + WhichHand + ", " + msg); #endif Log.d(LOG_TAG, "Hand: " + WhichHand + ", " + msg); } private void PrintInfoLog(string msg) { #if UNITY_EDITOR PrintDebugLog(msg); #endif Log.i(LOG_TAG, "Hand: " + WhichHand + ", " + msg); } private void PrintWarningLog(string msg) { #if UNITY_EDITOR PrintDebugLog(msg); #endif Log.w(LOG_TAG, "Hand: " + WhichHand + ", " + msg); } public enum ControllerHand { Controller_Right, Controller_Left }; public ControllerHand WhichHand = ControllerHand.Controller_Right; public GameObject defaultModel = null; public bool updateDynamically = false; private GameObject controllerSpawned = null; private WVR_DeviceType deviceType = WVR_DeviceType.WVR_DeviceType_Controller_Right; private bool connected = false; private string renderModelNamePath = ""; private string renderModelName = ""; private IntPtr ptrParameterName = IntPtr.Zero; private IntPtr ptrResult = IntPtr.Zero; private uint sessionid = 0; private const string VRACTIVITY_CLASSNAME = "com.htc.vr.unity.WVRUnityVRActivity"; private const string FILEUTILS_CLASSNAME = "com.htc.vr.unity.FileUtils"; private List<Color32> colors = new List<Color32>(); private GameObject meshCom = null; private GameObject meshGO = null; private Mesh updateMesh; private Texture2D MatImage = null; private Material modelMat; private FBXInfo_t[] FBXInfo; private MeshInfo_t[] SectionInfo; private uint sectionCount; private Thread mthread; private bool isChecking = false; private Material ImgMaterial; private static bool isRenderModelEnable = false; private WaitForEndOfFrame wfef = null; private WaitForSeconds wfs = null; void OnEnable() { PrintDebugLog("OnEnable"); if (WhichHand == ControllerHand.Controller_Right) { deviceType = WVR_DeviceType.WVR_DeviceType_Controller_Right; } else { deviceType = WVR_DeviceType.WVR_DeviceType_Controller_Left; } connected = checkConnection(); if (connected) { PrintDebugLog("Controller load when connected!"); WaveVR.Device _device = WaveVR.Instance.getDeviceByType(this.deviceType); onLoadController(_device.type); } WaveVR_Utils.Event.Listen(WaveVR_Utils.Event.DEVICE_CONNECTED, onDeviceConnected); } void OnDisable() { PrintDebugLog("OnDisable"); WaveVR_Utils.Event.Remove(WaveVR_Utils.Event.DEVICE_CONNECTED, onDeviceConnected); } private void onDeviceConnected(params object[] args) { bool _connected = false; WVR_DeviceType _type = this.deviceType; #if UNITY_EDITOR if (Application.isEditor) { _connected = WaveVR_Controller.Input(this.deviceType).connected; _type = WaveVR_Controller.Input(this.deviceType).DeviceType; } else #endif { WaveVR.Device _device = WaveVR.Instance.getDeviceByType(this.deviceType); _connected = _device.connected; _type = _device.type; } PrintDebugLog("onDeviceConnected() " + _type + " is " + (_connected ? "connected" : "disconnected") + ", left-handed? 
" + WaveVR_Controller.IsLeftHanded); if (connected != _connected) { connected = _connected; } if (connected) { if (!checkChild()) onLoadController(_type); } } // Use this for initialization void Start() { #if UNITY_EDITOR if (Application.isPlaying) { bool _connected = false; WVR_DeviceType _type = this.deviceType; _connected = WaveVR_Controller.Input(this.deviceType).connected; _type = WaveVR_Controller.Input(this.deviceType).DeviceType; onLoadController(_type); return; } #endif PrintDebugLog("start() connect: " + connected + " Which hand: " + WhichHand); ImgMaterial = new Material(Shader.Find("Unlit/Texture")); wfef = new WaitForEndOfFrame(); wfs = new WaitForSeconds(1.0f); if (updateDynamically) { PrintDebugLog("updateDynamically, start a coroutine to check connection and render model name periodly"); StartCoroutine(checkRenderModelAndDelete()); } } // Update is called once per frame void Update() { Log.gpl.d(LOG_TAG, "Update() render model " + WhichHand + " connect ? " + this.connected + ", child object count ? " + transform.childCount); } private void onLoadController(WVR_DeviceType type) { #if UNITY_EDITOR if (Application.isPlaying) { PrintDebugLog("onLoadController in play mode"); if (defaultModel != null) { controllerSpawned = Instantiate(defaultModel, this.transform); controllerSpawned.transform.parent = this.transform; } return; } #endif if (Interop.WVR_GetWaveRuntimeVersion() < 2) { PrintDebugLog("onLoadController in old service"); if (defaultModel != null) { controllerSpawned = Instantiate(defaultModel, this.transform); controllerSpawned.transform.parent = this.transform; } return; } bool loadControllerAssets = true; var found = false; string parameterName = "GetRenderModelName"; ptrParameterName = Marshal.StringToHGlobalAnsi(parameterName); ptrResult = Marshal.AllocHGlobal(64); uint resultVertLength = 64; Interop.WVR_GetParameters(type, ptrParameterName, ptrResult, resultVertLength); renderModelName = Marshal.PtrToStringAnsi(ptrResult); int deviceIndex = -1; parameterName = "backdoor_get_device_index"; ptrParameterName = Marshal.StringToHGlobalAnsi(parameterName); IntPtr ptrResultDeviceIndex = Marshal.AllocHGlobal(2); Interop.WVR_GetParameters(type, ptrParameterName, ptrResultDeviceIndex, 2); int _out = 0; bool _ret = int.TryParse(Marshal.PtrToStringAnsi(ptrResultDeviceIndex), out _out); if (_ret) deviceIndex = _out; PrintInfoLog("get controller id from runtime is " + renderModelName); // 1. 
check if there are assets in private folder string renderModelFolderPath = Application.temporaryCachePath + "/"; string renderModelUnzipFolder = renderModelFolderPath + renderModelName + "/"; renderModelNamePath = renderModelFolderPath + renderModelName + "/Unity"; if (!Directory.Exists(renderModelNamePath)) { PrintWarningLog(renderModelName + " assets, start to deploy"); loadControllerAssets = deployZIPFile(deviceIndex, renderModelUnzipFolder); } if (loadControllerAssets) { found = loadMeshAndImageByDevice(renderModelNamePath); if (found) { bool renderModelReady = makeupControllerModel(renderModelNamePath, sessionid); PrintDebugLog("renderModelReady = " + renderModelReady); Marshal.FreeHGlobal(ptrParameterName); Marshal.FreeHGlobal(ptrResult); return; } } if (defaultModel != null) { PrintDebugLog("Can't load controller model from DS, load default model"); controllerSpawned = Instantiate(defaultModel, this.transform); controllerSpawned.transform.parent = this.transform; } Marshal.FreeHGlobal(ptrParameterName); Marshal.FreeHGlobal(ptrResult); } private bool deployZIPFile(int deviceIndex, string renderModelUnzipFolder) { AndroidJavaClass ajc = new AndroidJavaClass(VRACTIVITY_CLASSNAME); if (ajc == null || deviceIndex == -1) { PrintWarningLog("AndroidJavaClass vractivity is null, deviceIndex" + deviceIndex); return false; } else { AndroidJavaObject activity = ajc.CallStatic<AndroidJavaObject>("getInstance"); if (activity != null) { AndroidJavaObject afd = activity.Call<AndroidJavaObject>("getControllerModelFileDescriptor", deviceIndex); if (afd != null) { AndroidJavaObject fileUtisObject = new AndroidJavaObject(FILEUTILS_CLASSNAME, activity, afd); if (fileUtisObject != null) { bool retUnzip = fileUtisObject.Call<bool>("doUnZIPAndDeploy", renderModelUnzipFolder); fileUtisObject = null; if (!retUnzip) { PrintWarningLog("doUnZIPAndDeploy failed"); } else { ajc = null; PrintInfoLog("doUnZIPAndDeploy success"); return true; } } else { PrintWarningLog("fileUtisObject is null"); } } else { PrintWarningLog("get fd failed"); } } else { PrintWarningLog("getInstance failed"); } } ajc = null; return false; } private bool loadMeshAndImageByDevice(string renderModelNamePath) { IntPtr ptrError = Marshal.AllocHGlobal(64); string FBXFile = renderModelNamePath + "/" + "unity.fbx"; bool ret = false; string errorCode = ""; if (File.Exists(FBXFile)) { ret = WaveVR_Utils.Assimp.OpenMesh(FBXFile, ref sessionid, ptrError); errorCode = Marshal.PtrToStringAnsi(ptrError); } else { ret = false; errorCode = "unity.fbx is not found!"; } PrintDebugLog("ret = " + ret + " error code = " + errorCode); if (ret) { string imageFile = renderModelNamePath + "/" + "unity.png"; bool fileExist = File.Exists(imageFile); PrintInfoLog("unity.png exist: " + fileExist); ret = fileExist; } PrintInfoLog("loadMeshAndImageByDevice ret: " + ret); Marshal.FreeHGlobal(ptrError); return ret; } public bool makeupControllerModel(string renderModelNamePath, uint sid) { sectionCount = 0; sessionid = sid; if (checkChild()) deleteChild(); string imageFile = renderModelNamePath + "/unity.png"; if (!File.Exists(imageFile)) { PrintDebugLog("failed to load texture"); return false; } byte[] imgByteArray = File.ReadAllBytes(imageFile); MatImage = new Texture2D(2, 2, TextureFormat.BGRA32, false); bool retLoad = MatImage.LoadImage(imgByteArray); PrintDebugLog("load image ret: " + retLoad + " size: " + imgByteArray.Length); if (!retLoad) { PrintDebugLog("failed to load texture"); return false; } bLoadMesh = false; PrintDebugLog("reset bLoadMesh, start to spawn 
game object after new connection"); StartCoroutine(SpawnRenderModel()); ThreadStart threadStart = new ThreadStart(readNativeData); mthread = new Thread(threadStart); mthread.Start(); isChecking = true; return true; } IEnumerator SpawnRenderModel() { while(true) { if (bLoadMesh) break; PrintDebugLog("SpawnRenderModel is waiting"); yield return wfef; } spawnMesh(); } bool bLoadMesh = false; void spawnMesh() { if (!bLoadMesh) { PrintDebugLog("bLoadMesh is false, skipping spawn objects"); return; } string meshName = ""; for (uint i = 0; i < sectionCount; i++) { meshName = Marshal.PtrToStringAnsi(FBXInfo[i].meshName); meshCom = null; meshGO = null; updateMesh = new Mesh(); meshCom = new GameObject(); meshCom.AddComponent<MeshRenderer>(); meshCom.AddComponent<MeshFilter>(); // meshCom = Resources.Load("MeshComponent") as GameObject; meshGO = Instantiate(meshCom); meshGO.transform.parent = this.transform; meshGO.name = meshName; Matrix4x4 t = WaveVR_Utils.RigidTransform.toMatrix44(FBXInfo[i].matrix); Vector3 x = WaveVR_Utils.GetPosition(t); meshGO.transform.localPosition = new Vector3(-x.x, x.y, x.z); PrintDebugLog("i = " + i + " MeshGO = " + meshName + ", localPosition: " + meshGO.transform.localPosition.x + ", " + meshGO.transform.localPosition.y + ", " + meshGO.transform.localPosition.z); meshGO.transform.localRotation = WaveVR_Utils.GetRotation(t); float a = 0f; Vector3 b = Vector3.zero; meshGO.transform.localRotation.ToAngleAxis(out a, out b); PrintDebugLog("i = " + i + " MeshGO = " + meshName + ", localRotation: " + b); //PrintDebugLog(" MeshGO = " + meshName + ", localRotation: " + meshGO.transform.localRotation); meshGO.transform.localScale = WaveVR_Utils.GetScale(t); PrintDebugLog("i = " + i + " MeshGO = " + meshName + ", localScale: " + meshGO.transform.localScale); meshGO.transform.Rotate(new Vector3(0, 180, 0)); var meshfilter = meshGO.GetComponent<MeshFilter>(); updateMesh.Clear(); updateMesh.vertices = SectionInfo[i]._vectice; updateMesh.uv = SectionInfo[i]._uv; updateMesh.uv2 = SectionInfo[i]._uv; updateMesh.colors32 = colors.ToArray(); updateMesh.normals = SectionInfo[i]._normal; updateMesh.SetIndices(SectionInfo[i]._indice, MeshTopology.Triangles, 0); updateMesh.name = meshName; if (meshfilter != null) { meshfilter.mesh = updateMesh; } var meshRenderer = meshGO.GetComponent<MeshRenderer>(); if (meshRenderer != null) { meshRenderer.material = ImgMaterial; meshRenderer.material.mainTexture = MatImage; meshRenderer.enabled = true; } if (meshName == "__CM__TouchPad_Touch") meshGO.SetActive(false); } WaveVR_Utils.Event.Send(WaveVR_Utils.Event.ADAPTIVE_CONTROLLER_READY, deviceType); cleanNativeData(); Resources.UnloadUnusedAssets(); } void cleanNativeData() { for (int i = 0; i < sectionCount; i++) { for (int j = 0; j < FBXInfo[i].verticeCount; j++) { SectionInfo[i]._vectice[j] = Vector3.zero; } SectionInfo[i]._vectice = null; for (int j = 0; j < FBXInfo[i].verticeCount; j++) { SectionInfo[i]._normal[j] = Vector3.zero; } SectionInfo[i]._normal = null; for (int j = 0; j < FBXInfo[i].verticeCount; j++) { SectionInfo[i]._uv[j] = Vector2.zero; } SectionInfo[i]._uv = null; for (int j = 0; j < FBXInfo[i].verticeCount; j++) { SectionInfo[i]._indice[j] = 0; } SectionInfo[i]._indice = null; Marshal.FreeHGlobal(FBXInfo[i].meshName); } SectionInfo = null; FBXInfo = null; WaveVR_Utils.Assimp.releaseMesh(sessionid); } void readNativeData() { bool ret = false; PrintDebugLog("sessionid = " + sessionid); bool finishLoading = WaveVR_Utils.Assimp.getSectionCount(sessionid, ref sectionCount); if 
(!finishLoading || sectionCount == 0) { PrintDebugLog("failed to load mesh"); return; } FBXInfo = new FBXInfo_t[sectionCount]; SectionInfo = new MeshInfo_t[sectionCount]; for (int i = 0; i < sectionCount; i++) { FBXInfo[i] = new FBXInfo_t(); SectionInfo[i] = new MeshInfo_t(); FBXInfo[i].meshName = Marshal.AllocHGlobal(64); } ret = WaveVR_Utils.Assimp.getMeshData(sessionid, FBXInfo); if (!ret) { for (int i = 0; i < sectionCount; i++) { Marshal.FreeHGlobal(FBXInfo[i].meshName); } SectionInfo = null; FBXInfo = null; WaveVR_Utils.Assimp.releaseMesh(sessionid); return; } for (uint i = 0; i < sectionCount; i++) { SectionInfo[i]._vectice = new Vector3[FBXInfo[i].verticeCount]; for (int j = 0; j < FBXInfo[i].verticeCount; j++) { SectionInfo[i]._vectice[j] = new Vector3(); } SectionInfo[i]._normal = new Vector3[FBXInfo[i].normalCount]; for (int j = 0; j < FBXInfo[i].verticeCount; j++) { SectionInfo[i]._normal[j] = new Vector3(); } SectionInfo[i]._uv = new Vector2[FBXInfo[i].uvCount]; for (int j = 0; j < FBXInfo[i].verticeCount; j++) { SectionInfo[i]._uv[j] = new Vector2(); } SectionInfo[i]._indice = new int[FBXInfo[i].indiceCount]; for (int j = 0; j < FBXInfo[i].verticeCount; j++) { SectionInfo[i]._indice[j] = new int(); } bool tret = WaveVR_Utils.Assimp.getSectionData(sessionid, i, SectionInfo[i]._vectice, SectionInfo[i]._normal, SectionInfo[i]._uv, SectionInfo[i]._indice); if (!tret) continue; PrintDebugLog("i = " + i + ", relative transform = [" + FBXInfo[i].matrix.m0 + " , " + FBXInfo[i].matrix.m1 + " , " + FBXInfo[i].matrix.m2 + " , " + FBXInfo[i].matrix.m3 + "] "); PrintDebugLog("i = " + i + ", relative transform = [" + FBXInfo[i].matrix.m4 + " , " + FBXInfo[i].matrix.m5 + " , " + FBXInfo[i].matrix.m6 + " , " + FBXInfo[i].matrix.m7 + "] "); PrintDebugLog("i = " + i + ", relative transform = [" + FBXInfo[i].matrix.m8 + " , " + FBXInfo[i].matrix.m9 + " , " + FBXInfo[i].matrix.m10 + " , " + FBXInfo[i].matrix.m11 + "] "); PrintDebugLog("i = " + i + ", relative transform = [" + FBXInfo[i].matrix.m12 + " , " + FBXInfo[i].matrix.m13 + " , " + FBXInfo[i].matrix.m14 + " , " + FBXInfo[i].matrix.m15 + "] "); PrintDebugLog("i = " + i + ", vertice count = " + FBXInfo[i].verticeCount + ", normal count = " + FBXInfo[i].normalCount + ", uv count = " + FBXInfo[i].uvCount + ", indice count = " + FBXInfo[i].indiceCount); } bLoadMesh = true; } void OnApplicationPause(bool pauseStatus) { #if UNITY_EDITOR if (Application.isPlaying) return; #endif if (updateDynamically) { if (!pauseStatus) // resume { isChecking = DeleteControllerWhenDisconnect(); } else { isChecking = false; } } } IEnumerator checkRenderModelAndDelete() { while (true) { if (isChecking) { isChecking = DeleteControllerWhenDisconnect(); } yield return wfs; } } private void deleteChild() { var ch = transform.childCount; for (int i = 0; i < ch; i++) { PrintInfoLog("deleteChild: " + transform.GetChild(i).gameObject.name); GameObject CM = transform.GetChild(i).gameObject; Destroy(CM); } } private bool checkChild() { var ch = transform.childCount; return (ch > 0) ? 
true : false; } private bool DeleteControllerWhenDisconnect() { if (!checkChild()) return false; bool _connected = WaveVR_Controller.Input(this.deviceType).connected; if (_connected) { WVR_DeviceType type = WaveVR_Controller.Input(this.deviceType).DeviceType; string parameterName = "GetRenderModelName"; ptrParameterName = Marshal.StringToHGlobalAnsi(parameterName); ptrResult = Marshal.AllocHGlobal(64); uint resultVertLength = 64; Interop.WVR_GetParameters(type, ptrParameterName, ptrResult, resultVertLength); string tmprenderModelName = Marshal.PtrToStringAnsi(ptrResult); if (tmprenderModelName != renderModelName) { PrintInfoLog("Destroy controller prefab because render model is different"); deleteChild(); Marshal.FreeHGlobal(ptrParameterName); Marshal.FreeHGlobal(ptrResult); return false; } Marshal.FreeHGlobal(ptrParameterName); Marshal.FreeHGlobal(ptrResult); } else { PrintInfoLog("Destroy controller prefab because it is disconnected"); deleteChild(); return false; } return true; } private bool checkConnection() { #if UNITY_EDITOR if (Application.isEditor) { return false; } else #endif { WaveVR.Device _device = WaveVR.Instance.getDeviceByType(this.deviceType); return _device.connected; } } }
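// --------------------------------------------------------------------------------------------
// A minimal, self-contained sketch of the unmanaged string round-trip used repeatedly above
// (Marshal.StringToHGlobalAnsi for the parameter name, Marshal.AllocHGlobal for the result
// buffer, Marshal.PtrToStringAnsi to read it back, Marshal.FreeHGlobal to release). The
// FakeNativeGetParameter method is a hypothetical stand-in for the real Interop.WVR_GetParameters
// call, and the try/finally is a suggested variation so the buffers are freed on every path,
// including early returns.
// --------------------------------------------------------------------------------------------
using System;
using System.Runtime.InteropServices;

internal static class NativeStringRoundTripSketch
{
    // Hypothetical stand-in for a native "get parameter" entry point.
    private static void FakeNativeGetParameter(IntPtr name, IntPtr result, uint resultCapacity)
    {
        byte[] payload = System.Text.Encoding.ASCII.GetBytes("SAMPLE_CONTROLLER_MODEL\0");
        Marshal.Copy(payload, 0, result, Math.Min(payload.Length, (int)resultCapacity));
    }

    private static string GetRenderModelName()
    {
        const uint resultCapacity = 64;
        IntPtr ptrParameterName = Marshal.StringToHGlobalAnsi("GetRenderModelName");
        IntPtr ptrResult = Marshal.AllocHGlobal((int)resultCapacity);
        try
        {
            FakeNativeGetParameter(ptrParameterName, ptrResult, resultCapacity);
            return Marshal.PtrToStringAnsi(ptrResult);
        }
        finally
        {
            // Always release the unmanaged buffers, no matter which branch returned.
            Marshal.FreeHGlobal(ptrParameterName);
            Marshal.FreeHGlobal(ptrResult);
        }
    }

    private static void Main()
    {
        Console.WriteLine(GetRenderModelName());
    }
}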
using System; using System.Collections.Generic; using System.Linq; using DotNetUtils; namespace Templator { public class TemplatorInputMapper<TContext> where TContext : TextHolderMappingContext, new() { protected readonly bool UseDeclaredPreiories ; protected readonly IList<ITextHolderResolver<TContext>> CustomResolvers = new List<ITextHolderResolver<TContext>>(); protected readonly IDictionary<string, IList<NameResolver<TContext>>> NameResolvers = new Dictionary<string, IList<NameResolver<TContext>>>(); protected readonly IDictionary<string, IList<CategoryResolver<TContext>>> CategoryResolvers = new Dictionary<string, IList<CategoryResolver<TContext>>>(); protected readonly IDictionary<string, IList<HierarchResolver<TContext>>> HierarchyResolvers = new Dictionary<string, IList<HierarchResolver<TContext>>>(); protected Func<TextHolder, TContext, object> DefaultResovler; protected object DefaultResolvedValue; protected bool DefaultResolvedValueSet; public ILogger Logger; public Stack<TContext> Contexts; public IList<Func<TextHolder, ITextHolderResolver<TContext>>> ResolvingMethods = new List<Func<TextHolder, ITextHolderResolver<TContext>>>(); public TContext Context => Contexts.Count > 0 ? Contexts.Peek() : null; /// <summary> /// Initialize a config instance /// </summary> /// <param name="useDeclaringOrderAsPreiories">Set to true to use the exact order of the resolvers added when resolving /// set to false to enable a method list to decide the priority of the resolver matching, the priority is determined by the order of enabling resolvers /// Call EnableXXXResolvers methods in the desire order, or call UseDefaultLookupOrder before generating</param> public TemplatorInputMapper(bool useDeclaringOrderAsPreiories = false) { UseDeclaredPreiories = useDeclaringOrderAsPreiories; } public TemplatorInputMapper(TemplatorInputMapper<TContext> configInstance, bool deepCopy = false) { UseDeclaredPreiories = configInstance.UseDeclaredPreiories; if (deepCopy) { CustomResolvers = configInstance.CustomResolvers.ToList(); NameResolvers = configInstance.NameResolvers.Copy(); CategoryResolvers = configInstance.CategoryResolvers.Copy(); HierarchyResolvers = configInstance.HierarchyResolvers.Copy(); } else { CustomResolvers = configInstance.CustomResolvers; NameResolvers = configInstance.NameResolvers; CategoryResolvers = configInstance.CategoryResolvers; HierarchyResolvers = configInstance.HierarchyResolvers; } } /// <summary> /// Add a resolver which matches text holder with a customized function /// </summary> /// <returns></returns> public CustomResolver<TContext> AddCustomResolver() { var ret = new CustomResolver<TContext>(); CustomResolvers.Add(ret); return ret; } /// <summary> /// Add a resolver which matches primarily with the text holder's name defined /// </summary> /// <param name="name"></param> /// <returns></returns> public NameResolver<TContext> AddNameResolver(string name) { var ret = new NameResolver<TContext>(name); if (UseDeclaredPreiories) { CustomResolvers.Add(ret); } else { var list = GetResolverList(name, NameResolvers); list.Add(ret); } return ret; } /// <summary> /// Add a resolver which matches primarily with the text holder's category defined /// </summary> /// <param name="category"></param> /// <returns></returns> public CategoryResolver<TContext> AddCategoryResolver(string category) { var ret = new CategoryResolver<TContext>(category); if (UseDeclaredPreiories) { CustomResolvers.Add(ret); } else { var list = GetResolverList(category, CategoryResolvers); list.Add(ret); } return 
ret; } /// <summary> /// Add a resolver which matches primarily with the text holder's parent path /// </summary> /// <param name="path"></param> /// <returns></returns> public HierarchResolver<TContext> AddHierarchyResolver(string path) { var ret = new HierarchResolver<TContext>(path); if (UseDeclaredPreiories) { CustomResolvers.Add(ret); } else { var list = GetResolverList(path, HierarchyResolvers); list.Add(ret); } return ret; } /// <summary> /// Set a default resolver called when no other resolvers were found /// </summary> /// <param name="resolverMethod"></param> public void SetDefaultResolver(Func<TextHolder, TContext, object> resolverMethod) { if (DefaultResolvedValueSet) { throw new InvalidOperationException("Default resolved constant value has been already set"); } DefaultResovler = resolverMethod; } /// <summary> /// Set a default resolved value to be used when no resolver was matched for given text holder /// </summary> /// <param name="value"></param> public void SetDefaultValue(object value) { if (DefaultResovler != null) { throw new InvalidOperationException("Default resolver method has been already set"); } DefaultResolvedValue = value; DefaultResolvedValueSet = true; } public IDictionary<string, object> GenerateInput(IDictionary<string, TextHolder> holders, TContext initialContext) { if (ResolvingMethods.IsNullOrEmpty()) { throw new InvalidOperationException("No resolvers has been enabled yet."); } Contexts = new Stack<TContext>(); return GenerateInputInternal(holders, initialContext); } protected IDictionary<string, object> GenerateInputInternal(IDictionary<string, TextHolder> holders, TContext context) { context = context ?? new TContext(); context.Parent = Context; context.Root = context.Parent?.Root ?? context; context.Path = context.Parent?.Path + (String.IsNullOrEmpty(Context?.Path) ? null : ".") + context.Parent?.Holder?.Name; Contexts.Push(context); foreach (var holder in holders.Values) { context.Holder = holder; var resolver = FindResolver(holder); if (resolver != null) { var value = resolver.ResolveValue(holder, context); if (value == null && (DefaultResolvedValueSet || DefaultResovler != null)) { value = (DefaultResolvedValueSet ? 
DefaultResolvedValue : DefaultResovler?.Invoke(holder, context)); } if (value == null && !holder.IsOptional()) { LogError("'{0}' is required.", holder.Name); } if (holder.Children == null) { context.Result.Add(holder.Name, value); } else { var dicts = value as IEnumerable<TContext>; if (dicts == null) { LogError("Collection resolved non-collection value"); } else { var list = new List<IDictionary<string, object>>(); var index = 0; foreach (var item in dicts) { item.CollectionIndex = index++; var child = GenerateInputInternal(holder.Children, item); list.Add(child); } context.Result.Add(holder.Name, list.ToArray()); } } } context.Holder = null; } var ret = context.Result; Contexts.Pop(); return ret; } /// <summary> /// Remove all method from resolver method list /// </summary> public void DisableAllResolvers() { ResolvingMethods.Clear(); } /// <summary> /// Use default fall back look up order, which is custom -> Name -> hierarchy -> category /// </summary> public void UseDefaultLookupOrder() { ResolvingMethods.Clear(); ResolvingMethods.Add(FindCustomResolver); ResolvingMethods.Add(FindNameResolver); ResolvingMethods.Add(FindHierachyResolver); ResolvingMethods.Add(FindCategoryResolver); } /// <summary> /// Add custom resolvers into resolver method list /// </summary> public void EnableCustomResolvers() { ResolvingMethods.Add(FindCustomResolver); } /// <summary> /// Add name resolvers into resolver method list /// </summary> public void EnableNameResolvers() { ResolvingMethods.Add(FindNameResolver); } /// <summary> /// Add category resolvers into resolver method list /// </summary> public void EnableCategoryResolvers() { ResolvingMethods.Add(FindCategoryResolver); } /// <summary> /// Add hierarchy resolvers into resolver method list /// </summary> public void EnableHierachyResolvers() { ResolvingMethods.Add(FindHierachyResolver); } /// <summary> /// Add a custom method into resolver method list to provide a customized logic for resolver look up /// </summary> public void AddResolverMethod(Func<TextHolder, ITextHolderResolver<TContext>> method) { ResolvingMethods.Add(method); } protected ITextHolderResolver<TContext> FindResolver(TextHolder holder) { if (UseDeclaredPreiories) { var ret = FindCustomResolver(holder); if (ret != null) { return ret; } } else { foreach (var method in ResolvingMethods) { var ret = method(holder); if (ret != null) { return ret; } } } if (!holder.IsOptional()) { LogError("No resolvers found for holder: {0}.", holder); } return null; } protected NameResolver<TContext> FindNameResolver(TextHolder holder) { var list = NameResolvers.GetOrDefault(holder.Name); return list?.FirstOrDefault(item => item.Match(holder, Context)); } protected ITextHolderResolver<TContext> FindCustomResolver(TextHolder holder) { return CustomResolvers.FirstOrDefault(item => item.Match(holder, Context)); } protected CategoryResolver<TContext> FindCategoryResolver(TextHolder holder) { var list = CategoryResolvers.GetOrDefault(holder.Category); return list?.FirstOrDefault(item => item.Match(holder, Context)); } protected HierarchResolver<TContext> FindHierachyResolver(TextHolder holder) { var list = HierarchyResolvers.GetOrDefault(Context.Path); return list?.FirstOrDefault(item => item.Match(holder, Context)); } protected void LogError(string pattern, params object[] args) { Logger?.LogError(pattern, args); } protected IList<T> GetResolverList<T>(string key, IDictionary<string, IList<T>> dict) where T: ITextHolderResolver<TContext> { IList<T> list; if (!dict.ContainsKey(key)) { list = new List<T>(); 
dict.Add(key, list); } else { list = dict[key]; } return list; } } }
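// A minimal usage sketch for the TemplatorInputMapper<TContext> defined above (not part of the
// original source). It assumes TextHolderMappingContext has a public parameterless constructor
// and that the 'holders' dictionary comes from elsewhere (e.g. parsed templates); the value
// function of the returned NameResolver is configured through its own fluent API, which is not
// shown in this excerpt.
using System.Collections.Generic;

namespace Templator.Samples
{
    public static class TemplatorInputMapperSample
    {
        public static IDictionary<string, object> MapInputs(IDictionary<string, TextHolder> holders)
        {
            var mapper = new TemplatorInputMapper<TextHolderMappingContext>();

            // Register a resolver keyed by holder name; how it produces values is configured
            // on the returned NameResolver instance (omitted here).
            mapper.AddNameResolver("FirstName");

            // Decide fall-back look-up priority: custom -> name -> hierarchy -> category.
            mapper.UseDefaultLookupOrder();

            // Fall back to an empty string whenever no resolver matches a holder.
            mapper.SetDefaultValue(string.Empty);

            return mapper.GenerateInput(holders, null);
        }
    }
}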
// This file is part of YamlDotNet - A .NET library for YAML. // Copyright (c) Antoine Aubry and contributors // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies // of the Software, and to permit persons to whom the Software is furnished to do // so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Text; using System.Text.RegularExpressions; using YamlDotNet.Core.Events; using ParsingEvent = YamlDotNet.Core.Events.ParsingEvent; using TagDirective = YamlDotNet.Core.Tokens.TagDirective; using VersionDirective = YamlDotNet.Core.Tokens.VersionDirective; namespace YamlDotNet.Core { /// <summary> /// Emits YAML streams. /// </summary> public class Emitter : IEmitter { private const int MinBestIndent = 2; private const int MaxBestIndent = 9; private const int MaxAliasLength = 128; private static readonly Regex uriReplacer = new Regex(@"[^0-9A-Za-z_\-;?@=$~\\\)\]/:&+,\.\*\(\[!]", StandardRegexOptions.Compiled | RegexOptions.Singleline); private readonly TextWriter output; private readonly bool outputUsesUnicodeEncoding; private readonly bool isCanonical; private readonly int bestIndent; private readonly int bestWidth; private EmitterState state; private readonly Stack<EmitterState> states = new Stack<EmitterState>(); private readonly Queue<ParsingEvent> events = new Queue<ParsingEvent>(); private readonly Stack<int> indents = new Stack<int>(); private readonly TagDirectiveCollection tagDirectives = new TagDirectiveCollection(); private int indent; private int flowLevel; private bool isMappingContext; private bool isSimpleKeyContext; private bool isRootContext; private int column; private bool isWhitespace; private bool isIndentation; private bool isOpenEnded; private bool isDocumentEndWritten; private readonly AnchorData anchorData = new AnchorData(); private readonly TagData tagData = new TagData(); private readonly ScalarData scalarData = new ScalarData(); private class AnchorData { public string anchor; public bool isAlias; } private class TagData { public string handle; public string suffix; } private class ScalarData { public string value; public bool isMultiline; public bool isFlowPlainAllowed; public bool isBlockPlainAllowed; public bool isSingleQuotedAllowed; public bool isBlockAllowed; public bool hasSingleQuotes; public ScalarStyle style; } /// <summary> /// Initializes a new instance of the <see cref="Emitter"/> class. 
/// </summary> /// <param name="output">The <see cref="TextWriter"/> where the emitter will write.</param> public Emitter(TextWriter output) : this(output, MinBestIndent) { } /// <summary> /// Initializes a new instance of the <see cref="Emitter"/> class. /// </summary> /// <param name="output">The <see cref="TextWriter"/> where the emitter will write.</param> /// <param name="bestIndent">The preferred indentation.</param> public Emitter(TextWriter output, int bestIndent) : this(output, bestIndent, int.MaxValue) { } /// <summary> /// Initializes a new instance of the <see cref="Emitter"/> class. /// </summary> /// <param name="output">The <see cref="TextWriter"/> where the emitter will write.</param> /// <param name="bestIndent">The preferred indentation.</param> /// <param name="bestWidth">The preferred text width.</param> public Emitter(TextWriter output, int bestIndent, int bestWidth) : this(output, bestIndent, bestWidth, false) { } /// <summary> /// Initializes a new instance of the <see cref="Emitter"/> class. /// </summary> /// <param name="output">The <see cref="TextWriter"/> where the emitter will write.</param> /// <param name="bestIndent">The preferred indentation.</param> /// <param name="bestWidth">The preferred text width.</param> /// <param name="isCanonical">If true, write the output in canonical form.</param> public Emitter(TextWriter output, int bestIndent, int bestWidth, bool isCanonical) { if (bestIndent < MinBestIndent || bestIndent > MaxBestIndent) { throw new ArgumentOutOfRangeException("bestIndent", string.Format(CultureInfo.InvariantCulture, "The bestIndent parameter must be between {0} and {1}.", MinBestIndent, MaxBestIndent)); } this.bestIndent = bestIndent; if (bestWidth <= bestIndent * 2) { throw new ArgumentOutOfRangeException("bestWidth", "The bestWidth parameter must be greater than bestIndent * 2."); } this.bestWidth = bestWidth; this.isCanonical = isCanonical; this.output = output; this.outputUsesUnicodeEncoding = IsUnicode(output.Encoding); } /// <summary> /// Emit an evt. /// </summary> public void Emit(ParsingEvent @event) { events.Enqueue(@event); while (!NeedMoreEvents()) { var current = events.Peek(); try { AnalyzeEvent(current); StateMachine(current); } finally { // Only dequeue after calling state_machine because it checks how many events are in the queue. // Todo: well, move into StateMachine() then events.Dequeue(); } } } /// <summary> /// Check if we need to accumulate more events before emitting. 
/// /// We accumulate extra /// - 1 event for DOCUMENT-START /// - 2 events for SEQUENCE-START /// - 3 events for MAPPING-START /// </summary> private bool NeedMoreEvents() { if (events.Count == 0) { return true; } int accumulate; switch (events.Peek().Type) { case EventType.DocumentStart: accumulate = 1; break; case EventType.SequenceStart: accumulate = 2; break; case EventType.MappingStart: accumulate = 3; break; default: return false; } if (events.Count > accumulate) { return false; } var level = 0; foreach (var evt in events) { switch (evt.Type) { case EventType.DocumentStart: case EventType.SequenceStart: case EventType.MappingStart: ++level; break; case EventType.DocumentEnd: case EventType.SequenceEnd: case EventType.MappingEnd: --level; break; } if (level == 0) { return false; } } return true; } private void AnalyzeEvent(ParsingEvent evt) { anchorData.anchor = null; tagData.handle = null; tagData.suffix = null; var alias = evt as AnchorAlias; if (alias != null) { AnalyzeAnchor(alias.Value, true); return; } var nodeEvent = evt as NodeEvent; if (nodeEvent != null) { var scalar = evt as Scalar; if (scalar != null) { AnalyzeScalar(scalar); } AnalyzeAnchor(nodeEvent.Anchor, false); if (!string.IsNullOrEmpty(nodeEvent.Tag) && (isCanonical || nodeEvent.IsCanonical)) { AnalyzeTag(nodeEvent.Tag); } } } private void AnalyzeAnchor(string anchor, bool isAlias) { anchorData.anchor = anchor; anchorData.isAlias = isAlias; } private void AnalyzeScalar(Scalar scalar) { var value = scalar.Value; scalarData.value = value; if (value.Length == 0) { if (scalar.Tag == "tag:yaml.org,2002:null") { scalarData.isMultiline = false; scalarData.isFlowPlainAllowed = false; scalarData.isBlockPlainAllowed = true; scalarData.isSingleQuotedAllowed = false; scalarData.isBlockAllowed = false; } else { scalarData.isMultiline = false; scalarData.isFlowPlainAllowed = false; scalarData.isBlockPlainAllowed = false; scalarData.isSingleQuotedAllowed = true; scalarData.isBlockAllowed = false; } return; } var flowIndicators = false; var blockIndicators = false; if (value.StartsWith("---", StringComparison.Ordinal) || value.StartsWith("...", StringComparison.Ordinal)) { flowIndicators = true; blockIndicators = true; } var buffer = new CharacterAnalyzer<StringLookAheadBuffer>(new StringLookAheadBuffer(value)); var preceededByWhitespace = true; var followedByWhitespace = buffer.IsWhiteBreakOrZero(1); var leadingSpace = false; var leadingBreak = false; var trailingSpace = false; var trailingBreak = false; var leadingQuote = false; var breakSpace = false; var spaceBreak = false; var previousSpace = false; var previousBreak = false; var lineBreaks = false; var specialCharacters = !ValueIsRepresentableInOutputEncoding(value); var singleQuotes = false; var isFirst = true; while (!buffer.EndOfInput) { if (isFirst) { if (buffer.Check(@"#,[]{}&*!|>\""%@`'")) { flowIndicators = true; blockIndicators = true; leadingQuote = buffer.Check('\''); singleQuotes |= buffer.Check('\''); } if (buffer.Check("?:")) { flowIndicators = true; if (followedByWhitespace) { blockIndicators = true; } } if (buffer.Check('-') && followedByWhitespace) { flowIndicators = true; blockIndicators = true; } } else { if (buffer.Check(",?[]{}")) { flowIndicators = true; } if (buffer.Check(':')) { flowIndicators = true; if (followedByWhitespace) { blockIndicators = true; } } if (buffer.Check('#') && preceededByWhitespace) { flowIndicators = true; blockIndicators = true; } singleQuotes |= buffer.Check('\''); } if (!specialCharacters && !buffer.IsPrintable()) { 
specialCharacters = true; } if (buffer.IsBreak()) { lineBreaks = true; } if (buffer.IsSpace()) { if (isFirst) { leadingSpace = true; } if (buffer.Buffer.Position >= buffer.Buffer.Length - 1) { trailingSpace = true; } if (previousBreak) { breakSpace = true; } previousSpace = true; previousBreak = false; } else if (buffer.IsBreak()) { if (isFirst) { leadingBreak = true; } if (buffer.Buffer.Position >= buffer.Buffer.Length - 1) { trailingBreak = true; } if (previousSpace) { spaceBreak = true; } previousSpace = false; previousBreak = true; } else { previousSpace = false; previousBreak = false; } preceededByWhitespace = buffer.IsWhiteBreakOrZero(); buffer.Skip(1); if (!buffer.EndOfInput) { followedByWhitespace = buffer.IsWhiteBreakOrZero(1); } isFirst = false; } scalarData.isFlowPlainAllowed = true; scalarData.isBlockPlainAllowed = true; scalarData.isSingleQuotedAllowed = true; scalarData.isBlockAllowed = true; if (leadingSpace || leadingBreak || trailingSpace || trailingBreak || leadingQuote) { scalarData.isFlowPlainAllowed = false; scalarData.isBlockPlainAllowed = false; } if (trailingSpace) { scalarData.isBlockAllowed = false; } if (breakSpace) { scalarData.isFlowPlainAllowed = false; scalarData.isBlockPlainAllowed = false; scalarData.isSingleQuotedAllowed = false; } if (spaceBreak || specialCharacters) { scalarData.isFlowPlainAllowed = false; scalarData.isBlockPlainAllowed = false; scalarData.isSingleQuotedAllowed = false; scalarData.isBlockAllowed = false; } scalarData.isMultiline = lineBreaks; if (lineBreaks) { scalarData.isFlowPlainAllowed = false; scalarData.isBlockPlainAllowed = false; } if (flowIndicators) { scalarData.isFlowPlainAllowed = false; } if (blockIndicators) { scalarData.isBlockPlainAllowed = false; } scalarData.hasSingleQuotes = singleQuotes; } private bool ValueIsRepresentableInOutputEncoding(string value) { if (outputUsesUnicodeEncoding) { return true; } try { var encodedBytes = output.Encoding.GetBytes(value); var decodedString = output.Encoding.GetString(encodedBytes, 0, encodedBytes.Length); return decodedString.Equals(value); } catch (EncoderFallbackException) { return false; } catch (ArgumentOutOfRangeException) { return false; } } private bool IsUnicode(Encoding encoding) { return encoding is UTF8Encoding || encoding is UnicodeEncoding || encoding is UTF7Encoding || encoding is UTF8Encoding; } private void AnalyzeTag(string tag) { tagData.handle = tag; foreach (var tagDirective in tagDirectives) { if (tag.StartsWith(tagDirective.Prefix, StringComparison.Ordinal)) { tagData.handle = tagDirective.Handle; tagData.suffix = tag.Substring(tagDirective.Prefix.Length); break; } } } private void StateMachine(ParsingEvent evt) { var comment = evt as Comment; if (comment != null) { EmitComment(comment); return; } switch (state) { case EmitterState.StreamStart: EmitStreamStart(evt); break; case EmitterState.FirstDocumentStart: EmitDocumentStart(evt, true); break; case EmitterState.DocumentStart: EmitDocumentStart(evt, false); break; case EmitterState.DocumentContent: EmitDocumentContent(evt); break; case EmitterState.DocumentEnd: EmitDocumentEnd(evt); break; case EmitterState.FlowSequenceFirstItem: EmitFlowSequenceItem(evt, true); break; case EmitterState.FlowSequenceItem: EmitFlowSequenceItem(evt, false); break; case EmitterState.FlowMappingFirstKey: EmitFlowMappingKey(evt, true); break; case EmitterState.FlowMappingKey: EmitFlowMappingKey(evt, false); break; case EmitterState.FlowMappingSimpleValue: EmitFlowMappingValue(evt, true); break; case EmitterState.FlowMappingValue: 
EmitFlowMappingValue(evt, false); break; case EmitterState.BlockSequenceFirstItem: EmitBlockSequenceItem(evt, true); break; case EmitterState.BlockSequenceItem: EmitBlockSequenceItem(evt, false); break; case EmitterState.BlockMappingFirstKey: EmitBlockMappingKey(evt, true); break; case EmitterState.BlockMappingKey: EmitBlockMappingKey(evt, false); break; case EmitterState.BlockMappingSimpleValue: EmitBlockMappingValue(evt, true); break; case EmitterState.BlockMappingValue: EmitBlockMappingValue(evt, false); break; case EmitterState.StreamEnd: throw new YamlException("Expected nothing after STREAM-END"); default: throw new InvalidOperationException(); } } private void EmitComment(Comment comment) { if (comment.IsInline) { Write(' '); } else { WriteIndent(); } Write("# "); Write(comment.Value); WriteBreak(); isIndentation = true; } /// <summary> /// Expect STREAM-START. /// </summary> private void EmitStreamStart(ParsingEvent evt) { if (!(evt is StreamStart)) { throw new ArgumentException("Expected STREAM-START.", "evt"); } indent = -1; column = 0; isWhitespace = true; isIndentation = true; state = EmitterState.FirstDocumentStart; } /// <summary> /// Expect DOCUMENT-START or STREAM-END. /// </summary> private void EmitDocumentStart(ParsingEvent evt, bool isFirst) { var documentStart = evt as DocumentStart; if (documentStart != null) { var isImplicit = documentStart.IsImplicit && isFirst && !isCanonical; var documentTagDirectives = NonDefaultTagsAmong(documentStart.Tags); if (!isFirst && !isDocumentEndWritten && (documentStart.Version != null || documentTagDirectives.Count > 0)) { isDocumentEndWritten = false; WriteIndicator("...", true, false, false); WriteIndent(); } if (documentStart.Version != null) { AnalyzeVersionDirective(documentStart.Version); isImplicit = false; WriteIndicator("%YAML", true, false, false); WriteIndicator(string.Format(CultureInfo.InvariantCulture, "{0}.{1}", Constants.MajorVersion, Constants.MinorVersion), true, false, false); WriteIndent(); } foreach (var tagDirective in documentTagDirectives) { AppendTagDirectiveTo(tagDirective, false, tagDirectives); } foreach (var tagDirective in Constants.DefaultTagDirectives) { AppendTagDirectiveTo(tagDirective, true, tagDirectives); } if (documentTagDirectives.Count > 0) { isImplicit = false; foreach (var tagDirective in Constants.DefaultTagDirectives) { AppendTagDirectiveTo(tagDirective, true, documentTagDirectives); } foreach (var tagDirective in documentTagDirectives) { WriteIndicator("%TAG", true, false, false); WriteTagHandle(tagDirective.Handle); WriteTagContent(tagDirective.Prefix, true); WriteIndent(); } } if (CheckEmptyDocument()) { isImplicit = false; } if (!isImplicit) { WriteIndent(); WriteIndicator("---", true, false, false); if (isCanonical) { WriteIndent(); } } state = EmitterState.DocumentContent; } else if (evt is StreamEnd) { if (isOpenEnded) { WriteIndicator("...", true, false, false); WriteIndent(); } state = EmitterState.StreamEnd; } else { throw new YamlException("Expected DOCUMENT-START or STREAM-END"); } } private TagDirectiveCollection NonDefaultTagsAmong(IEnumerable<TagDirective> tagCollection) { var directives = new TagDirectiveCollection(); if (tagCollection == null) return directives; foreach (var tagDirective in tagCollection) { AppendTagDirectiveTo(tagDirective, false, directives); } foreach (var tagDirective in Constants.DefaultTagDirectives) { directives.Remove(tagDirective); } return directives; } // ReSharper disable UnusedParameter.Local private void AnalyzeVersionDirective(VersionDirective 
versionDirective) { if (versionDirective.Version.Major != Constants.MajorVersion || versionDirective.Version.Minor != Constants.MinorVersion) { throw new YamlException("Incompatible %YAML directive"); } } // ReSharper restore UnusedParameter.Local private static void AppendTagDirectiveTo(TagDirective value, bool allowDuplicates, TagDirectiveCollection tagDirectives) { if (tagDirectives.Contains(value)) { if (!allowDuplicates) { throw new YamlException("Duplicate %TAG directive."); } } else { tagDirectives.Add(value); } } /// <summary> /// Expect the root node. /// </summary> private void EmitDocumentContent(ParsingEvent evt) { states.Push(EmitterState.DocumentEnd); EmitNode(evt, true, false, false); } /// <summary> /// Expect a node. /// </summary> private void EmitNode(ParsingEvent evt, bool isRoot, bool isMapping, bool isSimpleKey) { isRootContext = isRoot; isMappingContext = isMapping; isSimpleKeyContext = isSimpleKey; switch (evt.Type) { case EventType.Alias: EmitAlias(); break; case EventType.Scalar: EmitScalar(evt); break; case EventType.SequenceStart: EmitSequenceStart(evt); break; case EventType.MappingStart: EmitMappingStart(evt); break; default: throw new YamlException(string.Format("Expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, got {0}", evt.Type)); } } /// <summary> /// Expect ALIAS. /// </summary> private void EmitAlias() { ProcessAnchor(); state = states.Pop(); } /// <summary> /// Expect SCALAR. /// </summary> private void EmitScalar(ParsingEvent evt) { SelectScalarStyle(evt); ProcessAnchor(); ProcessTag(); IncreaseIndent(true, false); ProcessScalar(); indent = indents.Pop(); state = states.Pop(); } private void SelectScalarStyle(ParsingEvent evt) { var scalar = (Scalar)evt; var style = scalar.Style; var noTag = tagData.handle == null && tagData.suffix == null; if (noTag && !scalar.IsPlainImplicit && !scalar.IsQuotedImplicit) { throw new YamlException("Neither tag nor isImplicit flags are specified."); } if (style == ScalarStyle.Any) { style = scalarData.isMultiline ? ScalarStyle.Folded : ScalarStyle.Plain; } if (isCanonical) { style = ScalarStyle.DoubleQuoted; } if (isSimpleKeyContext && scalarData.isMultiline) { style = ScalarStyle.DoubleQuoted; } if (style == ScalarStyle.Plain) { if ((flowLevel != 0 && !scalarData.isFlowPlainAllowed) || (flowLevel == 0 && !scalarData.isBlockPlainAllowed)) { style = (scalarData.isSingleQuotedAllowed && !scalarData.hasSingleQuotes) ? 
ScalarStyle.SingleQuoted : ScalarStyle.DoubleQuoted; } if (string.IsNullOrEmpty(scalarData.value) && (flowLevel != 0 || isSimpleKeyContext)) { style = ScalarStyle.SingleQuoted; } if (noTag && !scalar.IsPlainImplicit) { style = ScalarStyle.SingleQuoted; } } if (style == ScalarStyle.SingleQuoted) { if (!scalarData.isSingleQuotedAllowed) { style = ScalarStyle.DoubleQuoted; } } if (style == ScalarStyle.Literal || style == ScalarStyle.Folded) { if (!scalarData.isBlockAllowed || flowLevel != 0 || isSimpleKeyContext) { style = ScalarStyle.DoubleQuoted; } } scalarData.style = style; } private void ProcessScalar() { switch (scalarData.style) { case ScalarStyle.Plain: WritePlainScalar(scalarData.value, !isSimpleKeyContext); break; case ScalarStyle.SingleQuoted: WriteSingleQuotedScalar(scalarData.value, !isSimpleKeyContext); break; case ScalarStyle.DoubleQuoted: WriteDoubleQuotedScalar(scalarData.value, !isSimpleKeyContext); break; case ScalarStyle.Literal: WriteLiteralScalar(scalarData.value); break; case ScalarStyle.Folded: WriteFoldedScalar(scalarData.value); break; default: throw new InvalidOperationException(); } } #region Write scalar Methods private void WritePlainScalar(string value, bool allowBreaks) { if (!isWhitespace) { Write(' '); } var previousSpace = false; var previousBreak = false; for (var index = 0; index < value.Length; ++index) { var character = value[index]; char breakCharacter; if (IsSpace(character)) { if (allowBreaks && !previousSpace && column > bestWidth && index + 1 < value.Length && value[index + 1] != ' ') { WriteIndent(); } else { Write(character); } previousSpace = true; } else if (IsBreak(character, out breakCharacter)) { if (!previousBreak && character == '\n') { WriteBreak(); } WriteBreak(breakCharacter); isIndentation = true; previousBreak = true; } else { if (previousBreak) { WriteIndent(); } Write(character); isIndentation = false; previousSpace = false; previousBreak = false; } } isWhitespace = false; isIndentation = false; if (isRootContext) { isOpenEnded = true; } } private void WriteSingleQuotedScalar(string value, bool allowBreaks) { WriteIndicator("'", true, false, false); var previousSpace = false; var previousBreak = false; for (var index = 0; index < value.Length; ++index) { var character = value[index]; char breakCharacter; if (character == ' ') { if (allowBreaks && !previousSpace && column > bestWidth && index != 0 && index + 1 < value.Length && value[index + 1] != ' ') { WriteIndent(); } else { Write(character); } previousSpace = true; } else if (IsBreak(character, out breakCharacter)) { if (!previousBreak && character == '\n') { WriteBreak(); } WriteBreak(breakCharacter); isIndentation = true; previousBreak = true; } else { if (previousBreak) { WriteIndent(); } if (character == '\'') { Write(character); } Write(character); isIndentation = false; previousSpace = false; previousBreak = false; } } WriteIndicator("'", false, false, false); isWhitespace = false; isIndentation = false; } private void WriteDoubleQuotedScalar(string value, bool allowBreaks) { WriteIndicator("\"", true, false, false); var previousSpace = false; for (var index = 0; index < value.Length; ++index) { var character = value[index]; char breakCharacter; if (!IsPrintable(character) || IsBreak(character, out breakCharacter) || character == '"' || character == '\\') { Write('\\'); switch (character) { case '\0': Write('0'); break; case '\x7': Write('a'); break; case '\x8': Write('b'); break; case '\x9': Write('t'); break; case '\xA': Write('n'); break; case '\xB': Write('v'); break; 
case '\xC': Write('f'); break; case '\xD': Write('r'); break; case '\x1B': Write('e'); break; case '\x22': Write('"'); break; case '\x5C': Write('\\'); break; case '\x85': Write('N'); break; case '\xA0': Write('_'); break; case '\x2028': Write('L'); break; case '\x2029': Write('P'); break; default: var code = (ushort)character; if (code <= 0xFF) { Write('x'); Write(code.ToString("X02", CultureInfo.InvariantCulture)); } else if (IsHighSurrogate(character)) { if (index + 1 < value.Length && IsLowSurrogate(value[index + 1])) { Write('U'); Write(char.ConvertToUtf32(character, value[index + 1]).ToString("X08", CultureInfo.InvariantCulture)); index++; } else { throw new SyntaxErrorException("While writing a quoted scalar, found an orphaned high surrogate."); } } else { Write('u'); Write(code.ToString("X04", CultureInfo.InvariantCulture)); } break; } previousSpace = false; } else if (character == ' ') { if (allowBreaks && !previousSpace && column > bestWidth && index > 0 && index + 1 < value.Length) { WriteIndent(); if (value[index + 1] == ' ') { Write('\\'); } } else { Write(character); } previousSpace = true; } else { Write(character); previousSpace = false; } } WriteIndicator("\"", false, false, false); isWhitespace = false; isIndentation = false; } private void WriteLiteralScalar(string value) { var previousBreak = true; WriteIndicator("|", true, false, false); WriteBlockScalarHints(value); WriteBreak(); isIndentation = true; isWhitespace = true; for (int i = 0; i < value.Length; ++i) { var character = value[i]; if (character == '\r' && (i + 1) < value.Length && value[i + 1] == '\n') { continue; } char breakCharacter; if (IsBreak(character, out breakCharacter)) { WriteBreak(breakCharacter); isIndentation = true; previousBreak = true; } else { if (previousBreak) { WriteIndent(); } Write(character); isIndentation = false; previousBreak = false; } } } private void WriteFoldedScalar(string value) { var previousBreak = true; var leadingSpaces = true; WriteIndicator(">", true, false, false); WriteBlockScalarHints(value); WriteBreak(); isIndentation = true; isWhitespace = true; for (var i = 0; i < value.Length; ++i) { var character = value[i]; char breakCharacter, ignoredBreak; if (IsBreak(character, out breakCharacter)) { if (!previousBreak && !leadingSpaces && character == '\n') { var k = 0; while (i + k < value.Length && IsBreak(value[i + k], out ignoredBreak)) { ++k; } if (i + k < value.Length && !(IsBlank(value[i + k]) || IsBreak(value[i + k], out ignoredBreak))) { WriteBreak(); } } WriteBreak(breakCharacter); isIndentation = true; previousBreak = true; } else { if (previousBreak) { WriteIndent(); leadingSpaces = IsBlank(character); } if (!previousBreak && character == ' ' && i + 1 < value.Length && value[i + 1] != ' ' && column > bestWidth) { WriteIndent(); } else { Write(character); } isIndentation = false; previousBreak = false; } } } // Todo: isn't this what CharacterAnalyser is for? 
private static bool IsSpace(char character) { return character == ' '; } private static bool IsBreak(char character, out char breakChar) { switch (character) { case '\r': case '\n': case '\x85': breakChar = '\n'; return true; case '\x2028': case '\x2029': breakChar = character; return true; default: breakChar = '\0'; return false; } } private static bool IsBlank(char character) { return character == ' ' || character == '\t'; } private static bool IsPrintable(char character) { return character == '\x9' || character == '\xA' || character == '\xD' || (character >= '\x20' && character <= '\x7E') || character == '\x85' || (character >= '\xA0' && character <= '\xD7FF') || (character >= '\xE000' && character <= '\xFFFD'); } private static bool IsHighSurrogate(char c) { return 0xD800 <= c && c <= 0xDBFF; } private static bool IsLowSurrogate(char c) { return 0xDC00 <= c && c <= 0xDFFF; } #endregion /// <summary> /// Expect SEQUENCE-START. /// </summary> private void EmitSequenceStart(ParsingEvent evt) { ProcessAnchor(); ProcessTag(); var sequenceStart = (SequenceStart)evt; if (flowLevel != 0 || isCanonical || sequenceStart.Style == SequenceStyle.Flow || CheckEmptySequence()) { state = EmitterState.FlowSequenceFirstItem; } else { state = EmitterState.BlockSequenceFirstItem; } } /// <summary> /// Expect MAPPING-START. /// </summary> private void EmitMappingStart(ParsingEvent evt) { ProcessAnchor(); ProcessTag(); var mappingStart = (MappingStart)evt; if (flowLevel != 0 || isCanonical || mappingStart.Style == MappingStyle.Flow || CheckEmptyMapping()) { state = EmitterState.FlowMappingFirstKey; } else { state = EmitterState.BlockMappingFirstKey; } } private void ProcessAnchor() { if (anchorData.anchor != null) { WriteIndicator(anchorData.isAlias ? "*" : "&", true, false, false); WriteAnchor(anchorData.anchor); } } private void ProcessTag() { if (tagData.handle == null && tagData.suffix == null) { return; } if (tagData.handle != null) { WriteTagHandle(tagData.handle); if (tagData.suffix != null) { WriteTagContent(tagData.suffix, false); } } else { WriteIndicator("!<", true, false, false); WriteTagContent(tagData.suffix, false); WriteIndicator(">", false, false, false); } } /// <summary> /// Expect DOCUMENT-END. /// </summary> private void EmitDocumentEnd(ParsingEvent evt) { var documentEnd = evt as DocumentEnd; if (documentEnd != null) { WriteIndent(); if (!documentEnd.IsImplicit) { WriteIndicator("...", true, false, false); WriteIndent(); isDocumentEndWritten = true; } state = EmitterState.DocumentStart; tagDirectives.Clear(); } else { throw new YamlException("Expected DOCUMENT-END."); } } /// <summary> /// Expect a flow item node. /// </summary> private void EmitFlowSequenceItem(ParsingEvent evt, bool isFirst) { if (isFirst) { WriteIndicator("[", true, true, false); IncreaseIndent(true, false); ++flowLevel; } if (evt is SequenceEnd) { --flowLevel; indent = indents.Pop(); if (isCanonical && !isFirst) { WriteIndicator(",", false, false, false); WriteIndent(); } WriteIndicator("]", false, false, false); state = states.Pop(); return; } if (!isFirst) { WriteIndicator(",", false, false, false); } if (isCanonical || column > bestWidth) { WriteIndent(); } states.Push(EmitterState.FlowSequenceItem); EmitNode(evt, false, false, false); } /// <summary> /// Expect a flow key node. 
/// </summary> private void EmitFlowMappingKey(ParsingEvent evt, bool isFirst) { if (isFirst) { WriteIndicator("{", true, true, false); IncreaseIndent(true, false); ++flowLevel; } if (evt is MappingEnd) { --flowLevel; indent = indents.Pop(); if (isCanonical && !isFirst) { WriteIndicator(",", false, false, false); WriteIndent(); } WriteIndicator("}", false, false, false); state = states.Pop(); return; } if (!isFirst) { WriteIndicator(",", false, false, false); } if (isCanonical || column > bestWidth) { WriteIndent(); } if (!isCanonical && CheckSimpleKey()) { states.Push(EmitterState.FlowMappingSimpleValue); EmitNode(evt, false, true, true); } else { WriteIndicator("?", true, false, false); states.Push(EmitterState.FlowMappingValue); EmitNode(evt, false, true, false); } } /// <summary> /// Expect a flow value node. /// </summary> private void EmitFlowMappingValue(ParsingEvent evt, bool isSimple) { if (isSimple) { WriteIndicator(":", false, false, false); } else { if (isCanonical || column > bestWidth) { WriteIndent(); } WriteIndicator(":", true, false, false); } states.Push(EmitterState.FlowMappingKey); EmitNode(evt, false, true, false); } /// <summary> /// Expect a block item node. /// </summary> private void EmitBlockSequenceItem(ParsingEvent evt, bool isFirst) { if (isFirst) { IncreaseIndent(false, (isMappingContext && !isIndentation)); } if (evt is SequenceEnd) { indent = indents.Pop(); state = states.Pop(); return; } WriteIndent(); WriteIndicator("-", true, false, true); states.Push(EmitterState.BlockSequenceItem); EmitNode(evt, false, false, false); } /// <summary> /// Expect a block key node. /// </summary> private void EmitBlockMappingKey(ParsingEvent evt, bool isFirst) { if (isFirst) { IncreaseIndent(false, false); } if (evt is MappingEnd) { indent = indents.Pop(); state = states.Pop(); return; } WriteIndent(); if (CheckSimpleKey()) { states.Push(EmitterState.BlockMappingSimpleValue); EmitNode(evt, false, true, true); } else { WriteIndicator("?", true, false, true); states.Push(EmitterState.BlockMappingValue); EmitNode(evt, false, true, false); } } /// <summary> /// Expect a block value node. /// </summary> private void EmitBlockMappingValue(ParsingEvent evt, bool isSimple) { if (isSimple) { WriteIndicator(":", false, false, false); } else { WriteIndent(); WriteIndicator(":", true, false, true); } states.Push(EmitterState.BlockMappingKey); EmitNode(evt, false, true, false); } private void IncreaseIndent(bool isFlow, bool isIndentless) { indents.Push(indent); if (indent < 0) { indent = isFlow ? bestIndent : 0; } else if (!isIndentless) { indent += bestIndent; } } #region Check Methods /// <summary> /// Check if the document content is an empty scalar. /// </summary> private bool CheckEmptyDocument() { var index = 0; foreach (var parsingEvent in events) { index++; if (index == 2) { var scalar = parsingEvent as Scalar; if (scalar != null) { return string.IsNullOrEmpty(scalar.Value); } break; } } return false; } /// <summary> /// Check if the next node can be expressed as a simple key. 
/// </summary> private bool CheckSimpleKey() { if (events.Count < 1) { return false; } int length; switch (events.Peek().Type) { case EventType.Alias: length = SafeStringLength(anchorData.anchor); break; case EventType.Scalar: if (scalarData.isMultiline) { return false; } length = SafeStringLength(anchorData.anchor) + SafeStringLength(tagData.handle) + SafeStringLength(tagData.suffix) + SafeStringLength(scalarData.value); break; case EventType.SequenceStart: if (!CheckEmptySequence()) { return false; } length = SafeStringLength(anchorData.anchor) + SafeStringLength(tagData.handle) + SafeStringLength(tagData.suffix); break; case EventType.MappingStart: if (!CheckEmptySequence()) { return false; } length = SafeStringLength(anchorData.anchor) + SafeStringLength(tagData.handle) + SafeStringLength(tagData.suffix); break; default: return false; } return length <= MaxAliasLength; } private int SafeStringLength(string value) { return value == null ? 0 : value.Length; } private bool CheckEmptySequence() { if (events.Count < 2) { return false; } // Todo: must be something better than this FakeList var eventList = new FakeList<ParsingEvent>(events); return eventList[0] is SequenceStart && eventList[1] is SequenceEnd; } private bool CheckEmptyMapping() { if (events.Count < 2) { return false; } // Todo: must be something better than this FakeList var eventList = new FakeList<ParsingEvent>(events); return eventList[0] is MappingStart && eventList[1] is MappingEnd; } #endregion #region Write Methods private void WriteBlockScalarHints(string value) { var analyzer = new CharacterAnalyzer<StringLookAheadBuffer>(new StringLookAheadBuffer(value)); if (analyzer.IsSpace() || analyzer.IsBreak()) { var indentHint = string.Format(CultureInfo.InvariantCulture, "{0}", bestIndent); WriteIndicator(indentHint, false, false, false); } isOpenEnded = false; string chompHint = null; if (value.Length == 0 || !analyzer.IsBreak(value.Length - 1)) { chompHint = "-"; } else if (value.Length >= 2 && analyzer.IsBreak(value.Length - 2)) { chompHint = "+"; isOpenEnded = true; } if (chompHint != null) { WriteIndicator(chompHint, false, false, false); } } private void WriteIndicator(string indicator, bool needWhitespace, bool whitespace, bool indentation) { if (needWhitespace && !isWhitespace) { Write(' '); } Write(indicator); isWhitespace = whitespace; isIndentation &= indentation; isOpenEnded = false; } private void WriteIndent() { var currentIndent = Math.Max(indent, 0); if (!isIndentation || column > currentIndent || (column == currentIndent && !isWhitespace)) { WriteBreak(); } while (column < currentIndent) { Write(' '); } isWhitespace = true; isIndentation = true; } private void WriteAnchor(string value) { Write(value); isWhitespace = false; isIndentation = false; } private void WriteTagHandle(string value) { if (!isWhitespace) { Write(' '); } Write(value); isWhitespace = false; isIndentation = false; } private void WriteTagContent(string value, bool needsWhitespace) { if (needsWhitespace && !isWhitespace) { Write(' '); } Write(UrlEncode(value)); isWhitespace = false; isIndentation = false; } private string UrlEncode(string text) { return uriReplacer.Replace(text, delegate (Match match) { var buffer = new StringBuilder(); foreach (var toEncode in Encoding.UTF8.GetBytes(match.Value)) { buffer.AppendFormat("%{0:X02}", toEncode); } return buffer.ToString(); }); } private void Write(char value) { output.Write(value); ++column; } private void Write(string value) { output.Write(value); column += value.Length; } private void 
WriteBreak(char breakCharacter = '\n') { if (breakCharacter == '\n') { output.WriteLine(); } else { output.Write(breakCharacter); } column = 0; } #endregion } }
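// A minimal usage sketch for the Emitter above (not part of the original source): it writes a
// single-document YAML mapping to a StringWriter by feeding parsing events in stream order.
// The event constructors are assumed to match the YamlDotNet.Core.Events types referenced in
// this file.
using System.IO;
using YamlDotNet.Core;
using YamlDotNet.Core.Events;

public static class EmitterSample
{
    public static string EmitGreeting()
    {
        var buffer = new StringWriter();
        var emitter = new Emitter(buffer, bestIndent: 2);

        emitter.Emit(new StreamStart());
        emitter.Emit(new DocumentStart());
        emitter.Emit(new MappingStart());
        emitter.Emit(new Scalar("greeting"));   // key
        emitter.Emit(new Scalar("hello"));      // value
        emitter.Emit(new MappingEnd());
        emitter.Emit(new DocumentEnd(true));    // implicit document end
        emitter.Emit(new StreamEnd());

        return buffer.ToString();               // e.g. "greeting: hello"
    }
}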
#region Licence... //----------------------------------------------------------------------------- // Date: 30/10/10 Time: 8:21 // Module: ScriptLauncherBuilder.cs // Classes: ScriptLauncherBuilder // // This module contains the definition of the ScriptLauncherBuilder class. Which implements // compiling light-weigh host application for the script execution. // // Written by Oleg Shilo ([email protected]) // Copyright (c) 2004-2012. All rights reserved. // // Redistribution and use of this code WITHOUT MODIFICATIONS are permitted provided that // the following conditions are met: // 1. Redistributions must retain the above copyright notice, this list of conditions // and the following disclaimer. // 2. Neither the name of an author nor the names of the contributors may be used // to endorse or promote products derived from this software without specific // prior written permission. // // Redistribution and use of this code WITH MODIFICATIONS are permitted provided that all // above conditions are met and software is not used or sold for profit. // // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Caution: Bugs are expected! //---------------------------------------------- #endregion Licence... 
using System; using System.IO; using System.Reflection; #if net1 using System.Collections; #else using System.Collections.Generic; #endif using System.Text; using CSScriptLibrary; using System.Runtime.InteropServices; using System.Threading; using System.CodeDom.Compiler; //using System.Windows.Forms; using System.Globalization; using System.Diagnostics; using Microsoft.CSharp; namespace csscript { internal class ScriptLauncherBuilder { public static string GetLauncherName(string assembly) { return assembly + ".host.exe"; } // UnitTest // Make Surrogate scenario to compile conditionally // + check and delete the exe before building // + set Appartment state // + update all ExecutionClients incliding csslib // + when starting remove css and //x args //+ try to solve limitations with console Input redurectionlimi //+ ensure launcher is not build when building dll/exe without execution public string BuildSurrogateLauncher(string scriptAssembly, string tragetFramework, CompilerParameters compilerParams, ApartmentState appartmentState) { //string #if !net4 throw new ApplicationException("Cannot build surrogate host application because this script engine is build against early version of CLR."); #else var provider = CodeDomProvider.CreateProvider("C#", new Dictionary<string, string> { { "CompilerVersion", tragetFramework } }); compilerParams.OutputAssembly = GetLauncherName(scriptAssembly); compilerParams.GenerateExecutable = true; compilerParams.GenerateInMemory = false; compilerParams.IncludeDebugInformation = false; try { if (File.Exists(compilerParams.OutputAssembly)) File.Delete(compilerParams.OutputAssembly); } catch (Exception e) { throw new ApplicationException("Cannot build surrogate host application", e); } if (compilerParams.CompilerOptions != null) compilerParams.CompilerOptions = compilerParams.CompilerOptions.Replace("/d:TRACE", "") .Replace("/d:DEBUG", ""); if (!AppInfo.appConsole) compilerParams.CompilerOptions += " /target:winexe"; string refAssemblies = ""; string appartment = "[STAThread]"; if (appartmentState == ApartmentState.MTA) appartment = "[" + appartmentState + "Thread]"; else if (appartmentState == ApartmentState.Unknown) appartment = ""; foreach (string asm in compilerParams.ReferencedAssemblies) if (File.Exists(asm)) //ignore GAC (not full path) assemblies refAssemblies += Assembly.ReflectionOnlyLoadFrom(asm).FullName + ":" + asm + ";"; string code = launcherCode .Replace("${REF_ASSEMBLIES}", refAssemblies) .Replace("${APPARTMENT}", appartment) .Replace("${ASM_MANE}", Path.GetFileName(scriptAssembly)); CompilerResults retval; bool debugLauncher = false; if (debugLauncher) { compilerParams.IncludeDebugInformation = true; compilerParams.CompilerOptions += " /d:DEBUG"; //string launcherFile = @"C:\Users\OSH\Desktop\New folder (2)\script.launcher.cs"; string launcherFile = Path.GetTempFileName(); File.WriteAllText(launcherFile, code); retval = provider.CompileAssemblyFromFile(compilerParams, launcherFile); } else retval = provider.CompileAssemblyFromSource(compilerParams, code); if (retval.Errors.Count != 0) throw CompilerException.Create(retval.Errors, true); CSSUtils.SetTimestamp(compilerParams.OutputAssembly, scriptAssembly); return compilerParams.OutputAssembly; #endif } const string launcherCode = @"using System; using System.Collections; using System.IO; using System.Reflection; class Script { ${APPARTMENT} static public int Main(string[] args) { try { AppDomain.CurrentDomain.AssemblyResolve += new ResolveEventHandler(CurrentDomain_AssemblyResolve); 
MainImpl(args); } catch(Exception e) { Console.WriteLine(e.ToString()); return 1; } return Environment.ExitCode; } static public void MainImpl(string[] args) { System.Diagnostics.Debug.Assert(false); string scriptAssembly = """"; bool debug = false; ArrayList newArgs = new ArrayList(); foreach (string arg in args) if (arg.StartsWith(""/css_host_dbg:"")) debug = (arg == ""/css_host_dbg:true""); else if (arg.StartsWith(""/css_host_asm:"")) scriptAssembly = arg.Substring(""/css_host_asm:"".Length); else newArgs.Add(arg); if (debug) { System.Diagnostics.Debugger.Launch(); if (System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); } if (scriptAssembly == """") { scriptAssembly = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), ""${ASM_MANE}""); } InvokeStaticMain(Assembly.LoadFrom(scriptAssembly), (string[])newArgs.ToArray(typeof(string))); } static void InvokeStaticMain(Assembly compiledAssembly, string[] scriptArgs) { MethodInfo method = null; foreach (Module m in compiledAssembly.GetModules()) { foreach (Type t in m.GetTypes()) { BindingFlags bf = BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.InvokeMethod | BindingFlags.Static; foreach (MemberInfo mi in t.GetMembers(bf)) { if (mi.Name == ""Main"") { method = t.GetMethod(mi.Name, bf); } if (method != null) break; } if (method != null) break; } if (method != null) break; } if (method != null) { object retval = null; if (method.GetParameters().Length != 0) retval = method.Invoke(new object(), new object[] { (Object)scriptArgs }); else retval = method.Invoke(new object(), null); if (retval != null) { try { Environment.ExitCode = int.Parse(retval.ToString()); } catch { } } } else { throw new ApplicationException(""Cannot find entry point. Make sure script file contains method: 'public static Main(...)'""); } } static string refAssemblies = @""${REF_ASSEMBLIES}""; static System.Reflection.Assembly CurrentDomain_AssemblyResolve(object sender, ResolveEventArgs args) { if (refAssemblies != """") { foreach (string asm in refAssemblies.Split(';')) if (asm.StartsWith(args.Name)) return Assembly.LoadFrom(asm.Substring(args.Name.Length + 1)); } return null; } }"; } }
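// A minimal invocation sketch (not part of the original source) for the ScriptLauncherBuilder
// above. Because the builder is internal, this sketch assumes it is compiled into the same
// assembly. It also assumes a compiled script assembly already exists and that the caller
// supplies the same CompilerParameters used to build it; "v4.0" and the method name here are
// illustrative only.
using System.CodeDom.Compiler;
using System.Threading;

namespace csscript.Samples
{
    internal static class ScriptLauncherBuilderSample
    {
        internal static string BuildLauncher(string scriptAssembly, CompilerParameters compilerParams)
        {
            var builder = new ScriptLauncherBuilder();

            // Produces "<scriptAssembly>.host.exe" next to the script assembly; the generated
            // launcher loads the script via /css_host_asm:<assembly> and forwards the remaining
            // command-line arguments to its Main method.
            return builder.BuildSurrogateLauncher(scriptAssembly, "v4.0", compilerParams, ApartmentState.STA);
        }
    }
}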
using System; using System.Collections.Generic; using System.Text; namespace Codehaus.Parsec { /// <summary> Describes Parse error.</summary> /// <author> Ben Yu /// /// Nov 18, 2004 /// </author> public interface ParseError { /// <summary> Gets the index number in the original source.</summary> /// <returns> the index number. /// </returns> int Index { get; } /// <summary> Get the "... encountered" error.</summary> /// <returns> the actually encountered token when error happens. /// </returns> string getEncountered(); /// <summary> Get the "unexpected ..." error.</summary> /// <returns> all the unexpected. /// </returns> string[] getUnexpected(); /// <summary> Get the user error messages.</summary> /// <returns> all the user-provided message. /// </returns> string[] getMessages(); /// <summary> Get the "expecting ..." errors.</summary> /// <returns> all the expectings. /// </returns> string[] getExpecting(); } abstract class AbstractParsecError { internal object Exception { get { return exception; } } internal bool Thrown { get { return exception != null; } } internal abstract ParsecError render(); internal abstract AbstractParsecError setExpecting(string s); internal int Precedence { get { return precedence; } } public int Index { get { return at; } } internal bool noMerge() { return nomerge; } private readonly bool nomerge; private readonly int at; private readonly int precedence; private readonly object exception; internal AbstractParsecError(bool nomerge, int at, int pred, object exception) { this.nomerge = nomerge; this.at = at; this.precedence = pred; this.exception = exception; } internal static AbstractParsecError mergeError(AbstractParsecError e1, AbstractParsecError e2) { if (e1 == null) return e2; if (e2 == null) return e1; if (e1 == e2) return e1; int pred = e1.precedence; int pred2 = e2.precedence; int at = e1.at; int at2 = e2.at; if (at == at2) { if (pred2 > pred) { return e2; } else if (pred > pred2) { return e1; } //else return e1; } else if (at > at2) { /*if(pred < pred2){ return e2; } else */ return e1; } else if (at < at2) { /*if(pred > pred2){ return e1; } else */ return e2; } if (e1.nomerge && e2.nomerge) { return e1; } return new MergedParsecError(at, pred, e1, e2); } } sealed class ParsecErrorExpecting : AbstractParsecError { internal override ParsecError render() { return err.render().setExpecting(s).render(); } internal override AbstractParsecError setExpecting(string s) { return new ParsecErrorExpecting(noMerge(), Index, Precedence, Exception, err, s); } private readonly AbstractParsecError err; private readonly string s; internal ParsecErrorExpecting(bool nomerge, int at, int pred, object exception, AbstractParsecError err, string s) : base(nomerge, at, pred, exception) { this.err = err; this.s = s; } } sealed class MergedParsecError : AbstractParsecError { internal override ParsecError render() { return getMerged(err1.render(), err2.render()); } internal override AbstractParsecError setExpecting(string s) { return ParsecError.raiseExpecting(Index, s, this); } private ParsecError getMerged(ParsecError e1, ParsecError e2) { return ParsecError.mergeError(Index, Exception, e1, e2); } private readonly AbstractParsecError err1; private readonly AbstractParsecError err2; internal MergedParsecError(int ind, int pred, AbstractParsecError err1, AbstractParsecError err2) : base(false, ind, pred, mergeObj(err1.Exception, err2.Exception)) { this.err1 = err1; this.err2 = err2; } internal static object mergeObj(object a, object b) { return a == null ? 
b : a; } } sealed class ParsecError : AbstractParsecError, ParseError { //private static readonly string[] err0 = new string[0]; internal override ParsecError render() { return this; } private readonly object sys_unexpected; private readonly string[] unexpected; private readonly string[] expecting; private readonly string[] raw; internal static int getPrecedenceForExpecting(string s) { return s != null ? 2 : 1; } private ParsecError(bool nm, int at, object sys, string[] unexpected, string[] expecting, string[] raw, object exception) : base(nm, at, (expecting != null || unexpected != null || raw != null) ? 2 : 1, exception) { this.sys_unexpected = sys; this.unexpected = unexpected; this.expecting = expecting; this.raw = raw; } internal override AbstractParsecError setExpecting(string s) { return new ParsecError(false, Index, sys_unexpected, unexpected, new string[] { s }, raw, Exception); } internal static ParsecError raiseRaw(int at, string msg) { return new ParsecError(false, at, null, null, null, new string[] { msg }, null); } internal static ParsecError raiseSysUnexpected(int at, object obj) { return new ParsecError(true, at, obj, null, null, null, null); } internal static ParsecError raiseUnexpected(int at , string s) { return new ParsecError(false, at, null, new string[] { s }, null, null, null); } internal static ParsecError raiseExpecting(int at, string s) { return new ParsecError(false, at, null, null, new string[] { s }, null, null); } internal static AbstractParsecError raiseExpecting(int at, string s, AbstractParsecError err) { return new ParsecErrorExpecting(false, at, myPrecedence(err.Precedence, s), err.Exception, err, s); } private static int max(int a, int b) { return a > b ? a : b; } private static int myPrecedence(int pred, string s) { return max(pred, ParsecError.getPrecedenceForExpecting(s)); } internal static ParsecError throwException(int at, object e) { return new ParsecError(false, at, null, null, null, null, e); } internal object getSysUnexpected() { return sys_unexpected; } public string getEncountered() { if (sys_unexpected == null) return null; else return sys_unexpected.ToString(); } public string[] getUnexpected() { return unexpected; } public string[] getExpecting() { return expecting; } public string[] getMessages() { return raw; } internal static ParsecError noError() { return null; } private static string[] mergeMsgs(string[] a, string[] b) { if (a == null) return b; if (b == null) return a; if (a == b) return a; string[] msgs = new string[a.Length + b.Length]; /*for(int i=0; i<a.Length; i++){ msgs[i] = a[i]; } for(int i=0; i<b.Length; i++){ msgs[i+a.Length] = b[i]; }*/ Array.Copy(a, msgs, a.Length); //System.arraycopy (a, 0, msgs, 0, a.Length); Array.Copy(b, 0, msgs, a.Length, b.Length); //System.arraycopy (b, 0, msgs, a.Length, b.Length); return msgs; } internal static ParsecError mergeError( int ind, object exception, ParsecError e1, ParsecError e2) { return new ParsecError(false, ind, MergedParsecError.mergeObj(e1.sys_unexpected, e2.sys_unexpected), mergeMsgs(e1.unexpected, e2.unexpected), mergeMsgs(e1.expecting, e2.expecting), mergeMsgs(e1.raw, e2.raw), exception ); } } }
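// A minimal sketch (not part of the original source) illustrating AbstractParsecError.mergeError's
// policy: the error that occurred further into the input wins, and at the same index the higher
// precedence ("expecting"/"unexpected" beats a raw system error) wins. Because these types are
// internal, this sketch assumes it is compiled into the same assembly; the sample class name is
// hypothetical.
namespace Codehaus.Parsec.Samples
{
    static class ParseErrorMergeSample
    {
        static void Demo()
        {
            AbstractParsecError early = ParsecError.raiseExpecting(5, "identifier");
            AbstractParsecError late = ParsecError.raiseSysUnexpected(9, "EOF");

            // The error at index 9 is kept because it occurred further along the input.
            AbstractParsecError merged = AbstractParsecError.mergeError(early, late);
            ParseError rendered = merged.render();   // rendered.Index == 9
        }
    }
}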
using System; using System.Collections.Generic; using System.Threading.Tasks; using Orleans.GrainReferences; using Orleans.Metadata; using Orleans.Runtime; namespace Orleans { /// <summary> /// Factory for accessing grains. /// </summary> internal class GrainFactory : IInternalGrainFactory { private GrainReferenceRuntime grainReferenceRuntime; /// <summary> /// The cache of typed system target references. /// </summary> private readonly Dictionary<(GrainId, Type), ISystemTarget> typedSystemTargetReferenceCache = new Dictionary<(GrainId, Type), ISystemTarget>(); private readonly GrainReferenceActivator referenceActivator; private readonly GrainInterfaceTypeResolver interfaceTypeResolver; private readonly GrainInterfaceTypeToGrainTypeResolver interfaceTypeToGrainTypeResolver; private readonly IRuntimeClient runtimeClient; public GrainFactory( IRuntimeClient runtimeClient, GrainReferenceActivator referenceActivator, GrainInterfaceTypeResolver interfaceTypeResolver, GrainInterfaceTypeToGrainTypeResolver interfaceToTypeResolver) { this.runtimeClient = runtimeClient; this.referenceActivator = referenceActivator; this.interfaceTypeResolver = interfaceTypeResolver; this.interfaceTypeToGrainTypeResolver = interfaceToTypeResolver; } private GrainReferenceRuntime GrainReferenceRuntime => this.grainReferenceRuntime ??= (GrainReferenceRuntime)this.runtimeClient.GrainReferenceRuntime; /// <inheritdoc /> public TGrainInterface GetGrain<TGrainInterface>(Guid primaryKey, string grainClassNamePrefix = null) where TGrainInterface : IGrainWithGuidKey { var grainKey = GrainIdKeyExtensions.CreateGuidKey(primaryKey); return (TGrainInterface)GetGrain(typeof(TGrainInterface), grainKey, grainClassNamePrefix: grainClassNamePrefix); } /// <inheritdoc /> public TGrainInterface GetGrain<TGrainInterface>(long primaryKey, string grainClassNamePrefix = null) where TGrainInterface : IGrainWithIntegerKey { var grainKey = GrainIdKeyExtensions.CreateIntegerKey(primaryKey); return (TGrainInterface)GetGrain(typeof(TGrainInterface), grainKey, grainClassNamePrefix: grainClassNamePrefix); } /// <inheritdoc /> public TGrainInterface GetGrain<TGrainInterface>(string primaryKey, string grainClassNamePrefix = null) where TGrainInterface : IGrainWithStringKey { var grainKey = IdSpan.Create(primaryKey); return (TGrainInterface)GetGrain(typeof(TGrainInterface), grainKey, grainClassNamePrefix: grainClassNamePrefix); } /// <inheritdoc /> public TGrainInterface GetGrain<TGrainInterface>(Guid primaryKey, string keyExtension, string grainClassNamePrefix = null) where TGrainInterface : IGrainWithGuidCompoundKey { DisallowNullOrWhiteSpaceKeyExtensions(keyExtension); var grainKey = GrainIdKeyExtensions.CreateGuidKey(primaryKey, keyExtension); return (TGrainInterface)GetGrain(typeof(TGrainInterface), grainKey, grainClassNamePrefix: grainClassNamePrefix); } /// <inheritdoc /> public TGrainInterface GetGrain<TGrainInterface>(long primaryKey, string keyExtension, string grainClassNamePrefix = null) where TGrainInterface : IGrainWithIntegerCompoundKey { DisallowNullOrWhiteSpaceKeyExtensions(keyExtension); var grainKey = GrainIdKeyExtensions.CreateIntegerKey(primaryKey, keyExtension); return (TGrainInterface)GetGrain(typeof(TGrainInterface), grainKey, grainClassNamePrefix: grainClassNamePrefix); } /// <inheritdoc /> public Task<TGrainObserverInterface> CreateObjectReference<TGrainObserverInterface>(IGrainObserver obj) where TGrainObserverInterface : IGrainObserver { return 
Task.FromResult(this.CreateObjectReference<TGrainObserverInterface>((IAddressable)obj)); } /// <inheritdoc /> public Task DeleteObjectReference<TGrainObserverInterface>( IGrainObserver obj) where TGrainObserverInterface : IGrainObserver { this.runtimeClient.DeleteObjectReference(obj); return Task.CompletedTask; } /// <inheritdoc /> public TGrainObserverInterface CreateObjectReference<TGrainObserverInterface>(IAddressable obj) where TGrainObserverInterface : IAddressable { return (TGrainObserverInterface)this.CreateObjectReference(typeof(TGrainObserverInterface), obj); } /// <summary> /// Casts the provided <paramref name="grain"/> to the specified interface /// </summary> /// <typeparam name="TGrainInterface">The target grain interface type.</typeparam> /// <param name="grain">The grain reference being cast.</param> /// <returns> /// A reference to <paramref name="grain"/> which implements <typeparamref name="TGrainInterface"/>. /// </returns> public TGrainInterface Cast<TGrainInterface>(IAddressable grain) { var interfaceType = typeof(TGrainInterface); return (TGrainInterface)this.Cast(grain, interfaceType); } /// <summary> /// Casts the provided <paramref name="grain"/> to the provided <paramref name="interfaceType"/>. /// </summary> /// <param name="grain">The grain.</param> /// <param name="interfaceType">The resulting interface type.</param> /// <returns>A reference to <paramref name="grain"/> which implements <paramref name="interfaceType"/>.</returns> public object Cast(IAddressable grain, Type interfaceType) => this.GrainReferenceRuntime.Cast(grain, interfaceType); public TGrainInterface GetSystemTarget<TGrainInterface>(GrainType grainType, SiloAddress destination) where TGrainInterface : ISystemTarget { var grainId = SystemTargetGrainId.Create(grainType, destination); return this.GetSystemTarget<TGrainInterface>(grainId.GrainId); } /// <summary> /// Gets a reference to the specified system target. 
/// </summary> /// <typeparam name="TGrainInterface">The system target interface.</typeparam> /// <param name="grainId">The id of the target.</param> /// <returns>A reference to the specified system target.</returns> public TGrainInterface GetSystemTarget<TGrainInterface>(GrainId grainId) where TGrainInterface : ISystemTarget { ISystemTarget reference; ValueTuple<GrainId, Type> key = ValueTuple.Create(grainId, typeof(TGrainInterface)); lock (this.typedSystemTargetReferenceCache) { if (this.typedSystemTargetReferenceCache.TryGetValue(key, out reference)) { return (TGrainInterface)reference; } reference = this.GetGrain<TGrainInterface>(grainId); this.typedSystemTargetReferenceCache[key] = reference; return (TGrainInterface)reference; } } /// <inheritdoc /> public TGrainInterface GetGrain<TGrainInterface>(GrainId grainId) where TGrainInterface : IAddressable { return (TGrainInterface)this.CreateGrainReference(typeof(TGrainInterface), grainId); } /// <inheritdoc /> public IAddressable GetGrain(GrainId grainId) => this.referenceActivator.CreateReference(grainId, default); /// <inheritdoc /> public IGrain GetGrain(Type grainInterfaceType, Guid key) { var grainKey = GrainIdKeyExtensions.CreateGuidKey(key); return (IGrain)GetGrain(grainInterfaceType, grainKey, grainClassNamePrefix: null); } /// <inheritdoc /> public IGrain GetGrain(Type grainInterfaceType, long key) { var grainKey = GrainIdKeyExtensions.CreateIntegerKey(key); return (IGrain)GetGrain(grainInterfaceType, grainKey, grainClassNamePrefix: null); } /// <inheritdoc /> public IGrain GetGrain(Type grainInterfaceType, string key) { var grainKey = IdSpan.Create(key); return (IGrain)GetGrain(grainInterfaceType, grainKey, grainClassNamePrefix: null); } /// <inheritdoc /> public IGrain GetGrain(Type grainInterfaceType, Guid key, string keyExtension) { var grainKey = GrainIdKeyExtensions.CreateGuidKey(key, keyExtension); return (IGrain)GetGrain(grainInterfaceType, grainKey, grainClassNamePrefix: null); } /// <inheritdoc /> public IGrain GetGrain(Type grainInterfaceType, long key, string keyExtension) { var grainKey = GrainIdKeyExtensions.CreateIntegerKey(key, keyExtension); return (IGrain)GetGrain(grainInterfaceType, grainKey, grainClassNamePrefix: null); } private IAddressable GetGrain(Type interfaceType, IdSpan grainKey, string grainClassNamePrefix) { var grainInterfaceType = this.interfaceTypeResolver.GetGrainInterfaceType(interfaceType); GrainType grainType; if (!string.IsNullOrWhiteSpace(grainClassNamePrefix)) { grainType = this.interfaceTypeToGrainTypeResolver.GetGrainType(grainInterfaceType, grainClassNamePrefix); } else { grainType = this.interfaceTypeToGrainTypeResolver.GetGrainType(grainInterfaceType); } var grainId = GrainId.Create(grainType, grainKey); var grain = this.referenceActivator.CreateReference(grainId, grainInterfaceType); return grain; } public IAddressable GetGrain(GrainId grainId, GrainInterfaceType interfaceType) { return this.referenceActivator.CreateReference(grainId, interfaceType); } private object CreateGrainReference(Type interfaceType, GrainId grainId) { var grainInterfaceType = this.interfaceTypeResolver.GetGrainInterfaceType(interfaceType); return this.referenceActivator.CreateReference(grainId, grainInterfaceType); } private object CreateObjectReference(Type interfaceType, IAddressable obj) { if (!interfaceType.IsInterface) { throw new ArgumentException( $"The provided type parameter must be an interface. 
'{interfaceType.FullName}' is not an interface."); } if (!interfaceType.IsInstanceOfType(obj)) { throw new ArgumentException($"The provided object must implement '{interfaceType.FullName}'.", nameof(obj)); } return this.Cast(this.runtimeClient.CreateObjectReference(obj), interfaceType); } private static void DisallowNullOrWhiteSpaceKeyExtensions(string keyExt) { if (!string.IsNullOrWhiteSpace(keyExt)) return; if (null == keyExt) { throw new ArgumentNullException(nameof(keyExt)); } throw new ArgumentException("Key extension is empty or white space.", nameof(keyExt)); } } }
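// Usage sketch (assumptions: IPlayerGrain is an application-defined grain interface
// and the IGrainFactory instance comes from dependency injection; neither appears in
// the code above). It shows how the GUID-key overload of GetGrain is exercised from
// calling code: the factory maps the key to a GrainId and resolves the grain type
// from the interface, as GrainFactory does internally.
using System;
using System.Threading.Tasks;
using Orleans;

public interface IPlayerGrain : IGrainWithGuidKey
{
    Task<string> GetNameAsync();
}

public class PlayerLookup
{
    private readonly IGrainFactory _grainFactory;

    public PlayerLookup(IGrainFactory grainFactory)
    {
        _grainFactory = grainFactory;
    }

    public Task<string> GetPlayerNameAsync(Guid playerId)
    {
        // Creating a grain reference is cheap; no message is sent until a
        // method on the reference is invoked.
        IPlayerGrain player = _grainFactory.GetGrain<IPlayerGrain>(playerId);
        return player.GetNameAsync();
    }
}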
using System; using System.Collections.Generic; using System.Text; using System.IO; using Pk2 = PICkit2V2.PICkitFunctions; using PIC32MM = PICkit2V2.PIC32MMFunctions; namespace PICkit2V2 { class ImportExportHex { public static DateTime LastWriteTime = new DateTime(); public static Constants.FileRead ImportHexFile(String filePath, bool progMem, bool eeMem) { // NOTE: The device buffers being read into must all be set to blank value before getting here! if (filePath.Length > 4) { if ((filePath.Substring(filePath.Length - 4).ToUpper() == ".BIN") && Pk2.FamilyIsEEPROM()) { return ImportBINFile(filePath); } } try { FileInfo hexFile = new FileInfo(filePath); LastWriteTime = hexFile.LastWriteTime; TextReader hexRead = hexFile.OpenText(); int bytesPerWord = Pk2.DevFile.Families[Pk2.GetActiveFamily()].ProgMemHexBytes; int eeMemBytes = Pk2.DevFile.Families[Pk2.GetActiveFamily()].EEMemHexBytes; uint eeAddr = Pk2.DevFile.PartsList[Pk2.ActivePart].EEAddr; int progMemSizeBytes = (int)Pk2.DevFile.PartsList[Pk2.ActivePart].ProgramMem * bytesPerWord; int segmentAddress = 0; bool configRead = false; bool lineExceedsFlash = true; bool fileExceedsFlash = false; int userIDs = Pk2.DevFile.PartsList[Pk2.ActivePart].UserIDWords; uint userIDAddr = Pk2.DevFile.PartsList[Pk2.ActivePart].UserIDAddr; if (userIDAddr == 0) { userIDAddr = 0xFFFFFFFF; } int userIDMemBytes = Pk2.DevFile.Families[Pk2.GetActiveFamily()].UserIDHexBytes; // need to set config words to memory blank. int configWords = Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigWords; bool[] configLoaded = new bool[configWords]; for (int cw = 0; cw < configWords; cw++) { Pk2.DeviceBuffers.ConfigWords[cw] = Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue; if (Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigMasks[cw] == 0) configLoaded[cw] = true; // if mask is blank (no implemented bits) don't need it in file else configLoaded[cw] = false; // implemented bits, so warn if not in hex file. } int cfgBytesPerWord = bytesPerWord; uint programMemStart = 0; uint bootMemStart = 0; uint bootMemSize = Pk2.DevFile.PartsList[Pk2.ActivePart].BootFlash; if (Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue > 0xFFFFFF) { // PIC32 programMemStart = Constants.P32_PROGRAM_FLASH_START_ADDR; bootMemStart = Constants.P32_BOOT_FLASH_START_ADDR; progMemSizeBytes -= (int)bootMemSize * bytesPerWord; progMemSizeBytes += (int)programMemStart; cfgBytesPerWord = 2; } uint bootMemEnd = bootMemStart + (bootMemSize * (uint)bytesPerWord); int bootArrayStart = (int)(Pk2.DevFile.PartsList[Pk2.ActivePart].ProgramMem - bootMemSize); string fileLine = hexRead.ReadLine(); while (fileLine != null) { if ((fileLine[0] == ':') && (fileLine.Length >= 11)) { // skip line if not hex line entry,or not minimum length ":BBAAAATTCC" int byteCount = Int32.Parse(fileLine.Substring(1,2), System.Globalization.NumberStyles.HexNumber); int fileAddress = segmentAddress + Int32.Parse(fileLine.Substring(3, 4), System.Globalization.NumberStyles.HexNumber); int recordType = Int32.Parse(fileLine.Substring(7,2), System.Globalization.NumberStyles.HexNumber); if (recordType == 0) { // Data Record} if (fileLine.Length >= (11+ (2* byteCount))) { // skip if line isn't long enough for bytecount. 
for (int lineByte = 0; lineByte < byteCount; lineByte++) { int byteAddress = fileAddress + lineByte; // compute array address from hex file address # bytes per memory location int arrayAddress = (byteAddress - (int)programMemStart) / bytesPerWord; // compute byte position withing memory word int bytePosition = byteAddress % bytesPerWord; // get the byte value from hex file uint wordByte = 0xFFFFFF00 | UInt32.Parse(fileLine.Substring((9 + (2 * lineByte)), 2), System.Globalization.NumberStyles.HexNumber); // shift the byte into its proper position in the word. for (int shift = 0; shift < bytePosition; shift++) { // shift byte into proper position wordByte <<= 8; wordByte |= 0xFF; // shift in ones. } lineExceedsFlash = true; // if not in any memory section, then error // program memory section -------------------------------------------------- if ((byteAddress >= programMemStart) && (byteAddress < progMemSizeBytes)) { if (progMem) { // if importing program memory Pk2.DeviceBuffers.ProgramMemory[arrayAddress] &= wordByte; // add byte. } lineExceedsFlash = false; //NOTE: program memory locations containing config words may get modified // by the config section below that applies the config masks. } // boot memory section -------------------------------------------------- if ((bootMemSize > 0) && (byteAddress >= bootMemStart) && (byteAddress < bootMemEnd)) { arrayAddress = (int)(bootArrayStart + ((byteAddress - bootMemStart) / bytesPerWord)); if (progMem) { // if importing program memory Pk2.DeviceBuffers.ProgramMemory[arrayAddress] &= wordByte; // add byte. } lineExceedsFlash = false; //NOTE: program memory locations containing config words may get modified // by the config section below that applies the config masks. } // EE data section --------------------------------------------------------- if ((byteAddress >= eeAddr) && (eeAddr > 0) && (Pk2.DevFile.PartsList[Pk2.ActivePart].EEMem > 0)) { int eeAddress = (int)(byteAddress - eeAddr) / eeMemBytes; if (eeAddress < Pk2.DevFile.PartsList[Pk2.ActivePart].EEMem) { lineExceedsFlash = false; if (eeMem) { // skip if not importing EE Memory if (eeMemBytes == bytesPerWord) { // same # hex bytes per EE location as ProgMem location Pk2.DeviceBuffers.EEPromMemory[eeAddress] &= wordByte; // add byte. } else { // PIC18F/J int eeshift = (bytePosition / eeMemBytes) * eeMemBytes; for (int reshift = 0; reshift < eeshift; reshift++) { // shift byte into proper position wordByte >>= 8; } Pk2.DeviceBuffers.EEPromMemory[eeAddress] &= wordByte; // add byte. } } } } // Some 18F parts without EEPROM have hex files created with blank EEPROM by MPLAB else if ((byteAddress >= eeAddr) && (eeAddr > 0) && (Pk2.DevFile.PartsList[Pk2.ActivePart].EEMem == 0)) { lineExceedsFlash = false; // don't give too-large file error. 
} // Config words section ---------------------------------------------------- if ((byteAddress >= Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigAddr) && (configWords > 0)) { int configNum; if (Pk2.FamilyIsPIC32MM()) { configNum = PIC32MM.setConfigWords(((uint)byteAddress - Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigAddr), wordByte); if (configNum >= 0) { lineExceedsFlash = false; configRead = true; configLoaded[configNum] = true; } } else { configNum = (byteAddress - ((int)Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigAddr)) / cfgBytesPerWord; if ((cfgBytesPerWord != bytesPerWord) && (bytePosition > 1)) { // PIC32 wordByte = (wordByte >> 16) & Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue; } if (configNum < Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigWords) { lineExceedsFlash = false; configRead = true; configLoaded[configNum] = true; if (progMem) { // if importing program memory Pk2.DeviceBuffers.ConfigWords[configNum] &= wordByte; // (wordByte & Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigMasks[configNum]); if (Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue == 0xFFF) { // baseline, set OR mask bits Pk2.DeviceBuffers.ConfigWords[configNum] |= Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigMasks[5]; } if (byteAddress < progMemSizeBytes) { // also mask off the word if in program memory. uint orMask = 0; if (Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue == 0xFFFF) {//PIC18J orMask = 0xF000; } else { // PIC24 is currently only other case of config in program mem orMask = (uint)(0xFF0000 | (Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigBlank[configNum] & ~Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigMasks[configNum])); } Pk2.DeviceBuffers.ProgramMemory[arrayAddress] &= // wordByte; (wordByte & Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigBlank[configNum]); // add byte. Pk2.DeviceBuffers.ProgramMemory[arrayAddress] |= orMask; } } } } } // User IDs section --------------------------------------------------------- if (userIDs > 0) { if (byteAddress >= userIDAddr) { int uIDAddress = (int)(byteAddress - userIDAddr) / userIDMemBytes; if (uIDAddress < userIDs) { lineExceedsFlash = false; if (progMem) { // if importing program memory if (userIDMemBytes == bytesPerWord) { // same # hex bytes per EE location as ProgMem location Pk2.DeviceBuffers.UserIDs[uIDAddress] &= wordByte; // add byte. } else { // PIC18F/J, PIC24H/dsPIC33 int uIDshift = (bytePosition / userIDMemBytes) * userIDMemBytes; for (int reshift = 0; reshift < uIDshift; reshift++) { // shift byte into proper position wordByte >>= 8; } Pk2.DeviceBuffers.UserIDs[uIDAddress] &= wordByte; // add byte. } } } } } // ignore data in hex file if (Pk2.DevFile.PartsList[Pk2.ActivePart].IgnoreBytes > 0) { if (byteAddress >= Pk2.DevFile.PartsList[Pk2.ActivePart].IgnoreAddress) { if ( byteAddress < (Pk2.DevFile.PartsList[Pk2.ActivePart].IgnoreAddress + Pk2.DevFile.PartsList[Pk2.ActivePart].IgnoreBytes)) { // if data is in the ignore region, don't do anything with it // but don't generate a "hex file larger than device" warning. 
lineExceedsFlash = false; } } } // test memory section --------------------------------------------------------- if (FormPICkit2.TestMemoryEnabled && FormPICkit2.TestMemoryOpen) { if (FormPICkit2.formTestMem.HexImportExportTM()) { if ((byteAddress >= Pk2.DevFile.Families[Pk2.GetActiveFamily()].TestMemoryStart) && (Pk2.DevFile.Families[Pk2.GetActiveFamily()].TestMemoryStart > 0) && (FormPICkit2.TestMemoryWords > 0)) { int tmAddress = (int)(byteAddress - Pk2.DevFile.Families[Pk2.GetActiveFamily()].TestMemoryStart) / bytesPerWord; if (tmAddress < FormPICkit2.TestMemoryWords) { lineExceedsFlash = false; FormTestMemory.TestMemory[tmAddress] &= wordByte; // add byte. } } } } } } if (lineExceedsFlash) { fileExceedsFlash = true; } } // end if (recordType == 0) if ((recordType == 2) || (recordType == 4)) { // Segment address if (fileLine.Length >= (11 + (2 * byteCount))) { // skip if line isn't long enough for bytecount. segmentAddress = Int32.Parse(fileLine.Substring(9, 4), System.Globalization.NumberStyles.HexNumber); } if (recordType == 2) { segmentAddress <<= 4; } else { segmentAddress <<= 16; } } // end if ((recordType == 2) || (recordType == 4)) if (recordType == 1) { // end of record break; } if (hexFile.Extension.ToUpper() == ".NUM") { // Only read first line of SQTP file break; } } fileLine = hexRead.ReadLine(); } hexRead.Close(); if (configWords > 0) { if (!configRead) { return Constants.FileRead.noconfig; } for (int cw = 0; cw < configWords; cw++) { if (!configLoaded[cw]) { // apply mask to dsPIC33/PIC24HJ config 8 to make sure JTAG bit is cleared if ((Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue == 0xFFFFFF) && (configWords > 7)) { Pk2.DeviceBuffers.ConfigWords[7] &= Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigMasks[7]; } return Constants.FileRead.partialcfg; } } } if (fileExceedsFlash) { return Constants.FileRead.largemem; } return Constants.FileRead.success; } catch { return Constants.FileRead.failed; } } public static Constants.FileRead ImportBINFile(string filePath) { // for serial EEPROMS only try { FileInfo binDataFile = new FileInfo(filePath); LastWriteTime = binDataFile.LastWriteTime; FileStream binFile = File.OpenRead(filePath); int bytesPerWord = Pk2.DevFile.Families[Pk2.GetActiveFamily()].ProgMemHexBytes; int memLoc = 0; int bytePosition = 0; byte[] fileByte = new byte[1]; while (binFile.Read(fileByte, 0, 1) > 0) { if (memLoc >= Pk2.DevFile.PartsList[Pk2.ActivePart].ProgramMem) return Constants.FileRead.largemem; uint memByte = 0xFFFFFF00 | (uint)fileByte[0]; for (int shift = 0; shift < bytePosition; shift++) { // shift byte into proper position memByte <<= 8; memByte |= 0xFF; // shift in ones. 
} Pk2.DeviceBuffers.ProgramMemory[memLoc] &= memByte; if (++bytePosition >= bytesPerWord) { memLoc++; bytePosition = 0; } } binFile.Close(); } catch { return Constants.FileRead.failed; } return Constants.FileRead.success; } public static bool ExportHexFile(string filePath, bool progMem, bool eeMem) { if (filePath.Length > 4) { if ((filePath.Substring(filePath.Length - 4).ToUpper() == ".BIN") && Pk2.FamilyIsEEPROM()) { return ExportBINFile(filePath); } } StreamWriter hexFile = new StreamWriter(filePath); // Start with segment zero if (Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue > 0xFFFFFF) { // PIC32 hexFile.WriteLine(":020000041D00DD"); } else { hexFile.WriteLine(":020000040000FA"); } // Program Memory ---------------------------------------------------------------------------- int fileSegment = 0; int fileAddress = 0; int programEnd = Pk2.DeviceBuffers.ProgramMemory.Length; if (Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue > 0xFFFFFF) { // PIC32 fileSegment = (int)(Constants.P32_PROGRAM_FLASH_START_ADDR >> 16); fileAddress = (int)(Constants.P32_PROGRAM_FLASH_START_ADDR & 0xFFFF); programEnd -= (int)Pk2.DevFile.PartsList[Pk2.ActivePart].BootFlash; } int arrayIndex = 0; int bytesPerWord = Pk2.DevFile.Families[Pk2.GetActiveFamily()].ProgMemHexBytes; int arrayIncrement = 16 / bytesPerWord; // # array words per hex line. if (progMem) { do { string hexLine = string.Format(":10{0:X4}00", fileAddress); for (int i = 0; i < arrayIncrement; i++) { // convert entire array word to hex string of 4 bytes. string hexWord = "00000000"; if ((arrayIndex + i) < Pk2.DeviceBuffers.ProgramMemory.Length) { hexWord = string.Format("{0:X8}", Pk2.DeviceBuffers.ProgramMemory[arrayIndex + i]); } for (int j = 0; j < bytesPerWord; j++) { hexLine += hexWord.Substring((6 - 2 * j), 2); } } hexLine += string.Format("{0:X2}", computeChecksum(hexLine)); hexFile.WriteLine(hexLine); fileAddress += 16; arrayIndex += arrayIncrement; // check for segment boundary if ((fileAddress > 0xFFFF) && (arrayIndex < Pk2.DeviceBuffers.ProgramMemory.Length)) { fileSegment += fileAddress >> 16; fileAddress &= 0xFFFF; string segmentLine = string.Format(":02000004{0:X4}", fileSegment); segmentLine += string.Format("{0:X2}", computeChecksum(segmentLine)); hexFile.WriteLine(segmentLine); } } while (arrayIndex < programEnd); } // Boot Memory ---------------------------------------------------------------------------- if ((Pk2.DevFile.PartsList[Pk2.ActivePart].BootFlash > 0) && Pk2.FamilyIsPIC32()) { hexFile.WriteLine(":020000041FC01B"); arrayIndex = programEnd; programEnd = Pk2.DeviceBuffers.ProgramMemory.Length; fileSegment = (int)(Constants.P32_BOOT_FLASH_START_ADDR >> 16); fileAddress = (int)(Constants.P32_BOOT_FLASH_START_ADDR & 0xFFFF); if (progMem) { do { string hexLine = string.Format(":10{0:X4}00", fileAddress); for (int i = 0; i < arrayIncrement; i++) { // convert entire array word to hex string of 4 bytes. 
string hexWord = string.Format("{0:X8}", Pk2.DeviceBuffers.ProgramMemory[arrayIndex + i]); for (int j = 0; j < bytesPerWord; j++) { hexLine += hexWord.Substring((6 - 2 * j), 2); } } hexLine += string.Format("{0:X2}", computeChecksum(hexLine)); hexFile.WriteLine(hexLine); fileAddress += 16; arrayIndex += arrayIncrement; // check for segment boundary if ((fileAddress > 0xFFFF) && (arrayIndex < Pk2.DeviceBuffers.ProgramMemory.Length)) { fileSegment += fileAddress >> 16; fileAddress &= 0xFFFF; string segmentLine = string.Format(":02000004{0:X4}", fileSegment); segmentLine += string.Format("{0:X2}", computeChecksum(segmentLine)); hexFile.WriteLine(segmentLine); } } while (arrayIndex < programEnd); } } // EEPROM ------------------------------------------------------------------------------------- if (eeMem) { int eeSize = Pk2.DevFile.PartsList[Pk2.ActivePart].EEMem; arrayIndex = 0; if (eeSize > 0) { uint eeAddr = Pk2.DevFile.PartsList[Pk2.ActivePart].EEAddr; if ((eeAddr & 0xFFFF0000) > 0) { // need a segment address string segmentLine = string.Format(":02000004{0:X4}", (eeAddr >> 16)); segmentLine += string.Format("{0:X2}", computeChecksum(segmentLine)); hexFile.WriteLine(segmentLine); } fileAddress = (int)eeAddr & 0xFFFF; int eeBytesPerWord = Pk2.DevFile.Families[Pk2.GetActiveFamily()].EEMemHexBytes; arrayIncrement = 16 / eeBytesPerWord; // # array words per hex line. do { string hexLine = string.Format(":10{0:X4}00", fileAddress); for (int i = 0; i < arrayIncrement; i++) { // convert entire array word to hex string of 4 bytes. string hexWord = string.Format("{0:X8}", Pk2.DeviceBuffers.EEPromMemory[arrayIndex + i]); for (int j = 0; j < eeBytesPerWord; j++) { hexLine += hexWord.Substring((6 - 2 * j), 2); } } hexLine += string.Format("{0:X2}", computeChecksum(hexLine)); hexFile.WriteLine(hexLine); fileAddress += 16; arrayIndex += arrayIncrement; }while (arrayIndex < Pk2.DeviceBuffers.EEPromMemory.Length); } } // Configuration Words ------------------------------------------------------------------------ // timijk 2017.02.10 if (progMem && Pk2.FamilyIsPIC32MM()) { PIC32MM.exportConfigHex(hexFile); } else if (progMem) { int cfgBytesPerWord = bytesPerWord; if (Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue > 0xFFFFFF) { // PIC32 cfgBytesPerWord = 2; } int configWords = Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigWords; if ((configWords > 0) && (Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigAddr > (Pk2.DevFile.PartsList[Pk2.ActivePart].ProgramMem * bytesPerWord))) { // If there are Config words and they aren't at the end of program flash uint configAddr = Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigAddr; if ((configAddr & 0xFFFF0000) > 0) { // need a segment address string segmentLine = string.Format(":02000004{0:X4}", (configAddr >> 16)); segmentLine += string.Format("{0:X2}", computeChecksum(segmentLine)); hexFile.WriteLine(segmentLine); } fileAddress = (int)configAddr & 0xFFFF; int cfgsWritten = 0; for (int lines = 0; lines < (((configWords * cfgBytesPerWord - 1)/16) + 1); lines++) { int cfgsLeft = configWords - cfgsWritten; if (cfgsLeft >= (16 / cfgBytesPerWord)) { cfgsLeft = (16 / cfgBytesPerWord); } string hexLine = string.Format(":{0:X2}{1:X4}00", (cfgsLeft * cfgBytesPerWord), fileAddress); fileAddress += (cfgsLeft * cfgBytesPerWord); for (int i = 0; i < cfgsLeft; i++) { // convert entire array word to hex string of 4 bytes. 
uint cfgWord = Pk2.DeviceBuffers.ConfigWords[cfgsWritten + i]; if (Pk2.DevFile.Families[Pk2.GetActiveFamily()].BlankValue > 0xFFFFFF) {// PIC32 cfgWord |= ~(uint)Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigMasks[cfgsWritten + i]; cfgWord &= Pk2.DevFile.PartsList[Pk2.ActivePart].ConfigBlank[cfgsWritten + i]; } string hexWord = string.Format("{0:X8}", cfgWord); for (int j = 0; j < cfgBytesPerWord; j++) { hexLine += hexWord.Substring(8 - ((j+1)*2), 2); } } hexLine += string.Format("{0:X2}", computeChecksum(hexLine)); hexFile.WriteLine(hexLine); cfgsWritten += cfgsLeft; } } } // UserIDs ------------------------------------------------------------------------------------ // timijk 2017.02.10 PIC32MM: UserID is done in Configuration Words if (progMem && !Pk2.FamilyIsPIC32MM()) { int userIDs = Pk2.DevFile.PartsList[Pk2.ActivePart].UserIDWords; arrayIndex = 0; if (userIDs > 0) { uint uIDAddr = Pk2.DevFile.PartsList[Pk2.ActivePart].UserIDAddr; if ((uIDAddr & 0xFFFF0000) > 0) { // need a segment address string segmentLine = string.Format(":02000004{0:X4}", (uIDAddr >> 16)); segmentLine += string.Format("{0:X2}", computeChecksum(segmentLine)); hexFile.WriteLine(segmentLine); } fileAddress = (int)uIDAddr & 0xFFFF; int idBytesPerWord = Pk2.DevFile.Families[Pk2.GetActiveFamily()].UserIDHexBytes; arrayIncrement = 16 / idBytesPerWord; // # array words per hex line. string hexLine; do { int remainingBytes = (userIDs - arrayIndex) * idBytesPerWord; if (remainingBytes < 16) { hexLine = string.Format(":{0:X2}{1:X4}00", remainingBytes, fileAddress); arrayIncrement = (userIDs - arrayIndex); } else { hexLine = string.Format(":10{0:X4}00", fileAddress); } for (int i = 0; i < arrayIncrement; i++) { // convert entire array word to hex string of 4 bytes. string hexWord = string.Format("{0:X8}", Pk2.DeviceBuffers.UserIDs[arrayIndex + i]); for (int j = 0; j < idBytesPerWord; j++) { hexLine += hexWord.Substring((6 - 2 * j), 2); } } hexLine += string.Format("{0:X2}", computeChecksum(hexLine)); hexFile.WriteLine(hexLine); fileAddress += 16; arrayIndex += arrayIncrement; } while (arrayIndex < Pk2.DeviceBuffers.UserIDs.Length); } } // Test Memory -------------------------------------------------------------------------------- if (FormPICkit2.TestMemoryEnabled && FormPICkit2.TestMemoryOpen) { if (FormPICkit2.formTestMem.HexImportExportTM()) { int tmSize = FormPICkit2.TestMemoryWords; arrayIndex = 0; if (tmSize > 0) { uint tmAddr = Pk2.DevFile.Families[Pk2.GetActiveFamily()].TestMemoryStart; if ((tmAddr & 0xFFFF0000) > 0) { // need a segment address string segmentLine = string.Format(":02000004{0:X4}", (tmAddr >> 16)); segmentLine += string.Format("{0:X2}", computeChecksum(segmentLine)); hexFile.WriteLine(segmentLine); } fileAddress = (int)tmAddr & 0xFFFF; int tmBytesPerWord = Pk2.DevFile.Families[Pk2.GetActiveFamily()].ProgMemHexBytes; arrayIncrement = 16 / tmBytesPerWord; // # array words per hex line. do { string hexLine = string.Format(":10{0:X4}00", fileAddress); for (int i = 0; i < arrayIncrement; i++) { // convert entire array word to hex string of 4 bytes. 
string hexWord = string.Format("{0:X8}", FormTestMemory.TestMemory[arrayIndex + i]); for (int j = 0; j < tmBytesPerWord; j++) { hexLine += hexWord.Substring((6 - 2 * j), 2); } } hexLine += string.Format("{0:X2}", computeChecksum(hexLine)); if ((fileAddress != ((int)tmAddr & 0xFFFF)) || (Pk2.GetActiveFamily() != 3)) { // skip User ID line on PIC18F hexFile.WriteLine(hexLine); } fileAddress += 16; arrayIndex += arrayIncrement; } while (arrayIndex < FormPICkit2.TestMemoryWords); } } } //end of record line. hexFile.WriteLine(":00000001FF"); hexFile.Close(); return true; } public static bool ExportBINFile(string filePath) { // for serial EEPROMS only try { FileStream binFile = File.Open(filePath,System.IO.FileMode.Create); int bytesPerWord = Pk2.DevFile.Families[Pk2.GetActiveFamily()].ProgMemHexBytes; for (int memLoc = 0; memLoc < Pk2.DevFile.PartsList[Pk2.ActivePart].ProgramMem; memLoc++) { for (int byteNum = 0; byteNum < bytesPerWord; byteNum++) { byte outputByte = (byte)((Pk2.DeviceBuffers.ProgramMemory[memLoc] >> (8 * byteNum)) & 0xFF); binFile.WriteByte(outputByte); } } binFile.Close(); } catch { return false; } return true; } public static byte computeChecksum(string fileLine) { int byteCount = Int32.Parse(fileLine.Substring(1, 2), System.Globalization.NumberStyles.HexNumber); if (fileLine.Length >= (9 + (2* byteCount))) { // skip if line isn't long enough for bytecount. int checksum = byteCount; for (int i = 0; i < (3 + byteCount); i++) { checksum += Int32.Parse(fileLine.Substring(3 + (2 * i), 2), System.Globalization.NumberStyles.HexNumber); } checksum = 0 - checksum; return (byte)(checksum & 0xFF); } return 0; } } }
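// Illustrative sketch (standalone; not part of the PICkit2 code above, and the class
// name is hypothetical): the Intel HEX checksum rule that computeChecksum implements.
// All bytes of a record -- byte count, address, record type, data and the trailing
// checksum -- must sum to zero modulo 256.
using System;
using System.Globalization;

static class IntelHexSketch
{
    // Returns true when the record's bytes, including the checksum, sum to 0 mod 256.
    public static bool HasValidChecksum(string record)
    {
        if (string.IsNullOrEmpty(record) || record[0] != ':' || (record.Length % 2) == 0)
            return false; // a valid record is a colon followed by an even number of hex digits
        int sum = 0;
        for (int i = 1; i < record.Length; i += 2)
            sum += int.Parse(record.Substring(i, 2), NumberStyles.HexNumber);
        return (sum & 0xFF) == 0;
    }

    public static void Demo()
    {
        // The extended linear address record emitted for PIC32 images above.
        Console.WriteLine(HasValidChecksum(":020000041D00DD")); // True
        // Corrupting a single data byte breaks the sum.
        Console.WriteLine(HasValidChecksum(":020000041D01DD")); // False
    }
}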
using System; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics; using System.Globalization; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.VisualStudio.ExtensionsExplorer; using NuGet.VisualStudio; namespace NuGet.Dialog.Providers { [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1001:TypesThatOwnDisposableFieldsShouldBeDisposable")] internal abstract class PackagesTreeNodeBase : IVsExtensionsTreeNode, IVsPageDataSource, IVsSortDataSource, IVsProgressPaneConsumer, INotifyPropertyChanged, IVsMessagePaneConsumer { // The number of extensions to show per page. private const int DefaultItemsPerPage = 10; // We cache the query until it changes (due to sort order or search) private IEnumerable<IPackage> _query; private int _totalCount; #if VS10 private IList<IVsExtension> _extensions; #else private IList _extensions; #endif private IList<IVsExtensionsTreeNode> _nodes; private int _totalPages = 1, _currentPage = 1; private bool _progressPaneActive; private bool _isExpanded; private bool _isSelected; private bool _loadingInProgress; private bool _includePrereleaseWhenLastLoaded; private readonly bool _collapseVersions; private CancellationTokenSource _currentCancellationSource; public event PropertyChangedEventHandler PropertyChanged; public event EventHandler<EventArgs> PageDataChanged; protected PackagesTreeNodeBase(IVsExtensionsTreeNode parent, PackagesProviderBase provider, bool collapseVersions = true) { Debug.Assert(provider != null); _collapseVersions = collapseVersions; Parent = parent; Provider = provider; PageSize = DefaultItemsPerPage; } public bool CollapseVersions { get { return _collapseVersions; } } protected PackagesProviderBase Provider { get; private set; } private IVsProgressPane ProgressPane { get; set; } private IVsMessagePane MessagePane { get; set; } /// <summary> /// Name of this node /// </summary> public abstract string Name { get; } public bool IsSearchResultsNode { get; set; } /// <summary> /// Select node (UI) property /// This property maps to TreeViewItem.IsSelected /// </summary> public bool IsSelected { get { return _isSelected; } set { if (_isSelected != value) { _isSelected = value; OnNotifyPropertyChanged("IsSelected"); } } } public abstract bool SupportsPrereleasePackages { get; } /// <summary> /// Expand node (UI) property /// This property maps to TreeViewItem.IsExpanded /// </summary> public bool IsExpanded { get { return _isExpanded; } set { if (_isExpanded != value) { _isExpanded = value; OnNotifyPropertyChanged("IsExpanded"); } } } /// <summary> /// List of templates at this node for the current page only /// </summary> #if VS10 public IList<IVsExtension> Extensions { #else public IList Extensions { #endif get { if (_extensions == null) { EnsureExtensionCollection(); LoadPage(1); } return _extensions; } } /// <summary> /// Children at this node /// </summary> public IList<IVsExtensionsTreeNode> Nodes { get { if (_nodes == null) { _nodes = new ObservableCollection<IVsExtensionsTreeNode>(); } return _nodes; } } /// <summary> /// Parent of this node /// </summary> public IVsExtensionsTreeNode Parent { get; private set; } public int TotalPages { get { return _totalPages; } internal set { _totalPages = value; NotifyPropertyChanged(); } } public int CurrentPage { get { return _currentPage; } internal set { _currentPage = value; NotifyPropertyChanged(); } } public int TotalNumberOfPackages { get 
{ return _totalCount; } } /// <summary> /// Raised when the current node completes loading packages. /// </summary> internal event EventHandler PackageLoadCompleted = delegate { }; internal int PageSize { get; set; } /// <summary> /// Refresh the list of packages belong to this node /// </summary> public virtual void Refresh(bool resetQueryBeforeRefresh = false) { if (resetQueryBeforeRefresh) { ResetQuery(); } LoadPage(CurrentPage); } public override string ToString() { return Name; } /// <summary> /// Get all packages belonging to this node. /// </summary> /// <returns></returns> public abstract IQueryable<IPackage> GetPackages(string searchTerm, bool allowPrereleaseVersions); /// <summary> /// Helper function to raise property changed events /// </summary> private void NotifyPropertyChanged() { if (PageDataChanged != null) { PageDataChanged(this, EventArgs.Empty); } } /// <summary> /// Loads the packages in the specified page. /// </summary> /// <param name="pageNumber"></param> public void LoadPage(int pageNumber) { if (pageNumber < 1) { throw new ArgumentOutOfRangeException( "pageNumber", String.Format(CultureInfo.CurrentCulture, CommonResources.Argument_Must_Be_GreaterThanOrEqualTo, 1)); } if (_loadingInProgress || Provider.SuppressLoad) { return; } EnsureExtensionCollection(); // Bug #1930: this will clear the content of details pane Extensions.Clear(); ShowProgressPane(); // avoid more than one loading occurring at the same time _loadingInProgress = true; _includePrereleaseWhenLastLoaded = Provider.IncludePrerelease; _currentCancellationSource = new CancellationTokenSource(); TaskScheduler uiScheduler; try { uiScheduler = TaskScheduler.FromCurrentSynchronizationContext(); } catch (InvalidOperationException) { // FromCurrentSynchronizationContext() fails when running from unit test uiScheduler = TaskScheduler.Default; } NuGetEventTrigger.Instance.TriggerEvent(NuGetEvent.PackageLoadBegin); Task.Factory.StartNew( (state) => ExecuteAsync(pageNumber, _currentCancellationSource.Token), _currentCancellationSource, _currentCancellationSource.Token).ContinueWith(QueryExecutionCompleted, uiScheduler); } private void EnsureExtensionCollection() { if (_extensions == null) { _extensions = new ObservableCollection<IVsExtension>(); } } /// <summary> /// Called when user clicks on the Cancel button in the progress pane. /// </summary> private void CancelCurrentExtensionQuery() { Debug.WriteLine("Cancelling pending extensions query."); if (_currentCancellationSource != null) { _currentCancellationSource.Cancel(); _loadingInProgress = false; Provider.RemoveSearchNode(); } } /// <summary> /// This method executes on background thread. /// </summary> [System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "We want to show error message inside the dialog, rather than blowing up VS.")] private LoadPageResult ExecuteAsync(int pageNumber, CancellationToken token) { token.ThrowIfCancellationRequested(); if (_query == null) { IQueryable<IPackage> query = GetPackages(searchTerm: null, allowPrereleaseVersions: Provider.IncludePrerelease); if (CollapseVersions) { query = CollapsePackageVersions(query); } token.ThrowIfCancellationRequested(); // Execute the total count query _totalCount = query.Count(); // make sure we don't query a page that is greater than the maximum page number. 
int maximumPages = (_totalCount + PageSize - 1)/PageSize; pageNumber = Math.Min(pageNumber, maximumPages); token.ThrowIfCancellationRequested(); IQueryable<IPackage> orderedQuery = ApplyOrdering(query); // Buffer 3 pages _query = orderedQuery.AsBufferedEnumerable(PageSize * 3); if (CollapseVersions) { // If we are connecting to an older gallery implementation, we need to use the Published field. // For newer gallery, the package is never unpublished, it is only unlisted. _query = _query.Where(PackageExtensions.IsListed).AsCollapsed(); } } IList<IPackage> packages = _query.Skip((pageNumber - 1) * PageSize) .Take(PageSize) .ToList(); if (packages.Count < PageSize) { _totalCount = (pageNumber - 1) * PageSize + packages.Count; } token.ThrowIfCancellationRequested(); return new LoadPageResult(packages, pageNumber, _totalCount); } protected virtual IQueryable<IPackage> CollapsePackageVersions(IQueryable<IPackage> packages) { if (Provider.IncludePrerelease && SupportsPrereleasePackages) { return packages.Where(p => p.IsAbsoluteLatestVersion); } else { return packages.Where(p => p.IsLatestVersion); } } protected virtual IQueryable<IPackage> ApplyOrdering(IQueryable<IPackage> query) { // If the default sort is null then fall back to download count IOrderedQueryable<IPackage> result; if (Provider.CurrentSort == null) { result = query.OrderByDescending(p => p.DownloadCount); } else { // Order by the current descriptor result = query.SortBy<IPackage>(Provider.CurrentSort.SortProperties, Provider.CurrentSort.Direction); } return result.ThenBy(p => p.Id); } public IList<IVsSortDescriptor> GetSortDescriptors() { // Get the sort descriptor from the provider return Provider.SortDescriptors; } protected internal void ResetQuery() { _query = null; } public bool SortSelectionChanged(IVsSortDescriptor selectedDescriptor) { Provider.CurrentSort = selectedDescriptor as PackageSortDescriptor; // The value of CurrentSort could be null if we're dealing with the SearchProvider. Use the selectedDescriptor instead since it returns the actual instance. if (selectedDescriptor != null) { // If we changed the sort order then invalidate the cache. ResetQuery(); // Reload the first page since the sort order changed LoadPage(1); return true; } return false; } [System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "We don't want it to crash VS.")] private void QueryExecutionCompleted(Task<LoadPageResult> task) { // If a task throws, the exception must be handled or the Exception // property must be accessed or the exception will tear down the process when finalized Exception exception = task.Exception; if (task.IsFaulted) { try { ExceptionHelper.WriteToActivityLog(exception); } catch { // don't let this crash VS } } var cancellationSource = (CancellationTokenSource)task.AsyncState; if (cancellationSource != _currentCancellationSource) { return; } _loadingInProgress = false; // Only process the result if this node is still selected. 
if (IsSelected) { if (task.IsCanceled) { HideProgressPane(); } else { LoadPageResult result = task.Result; UpdateNewPackages(result.Packages.ToList()); int totalPages = (result.TotalCount + PageSize - 1) / PageSize; TotalPages = Math.Max(1, totalPages); CurrentPage = Math.Max(1, result.PageNumber); HideProgressPane(); } } Provider.OnPackageLoadCompleted(this); // for unit tests PackageLoadCompleted(this, EventArgs.Empty); NuGetEventTrigger.Instance.TriggerEvent(NuGetEvent.PackageLoadEnd); } private void UpdateNewPackages(IList<IPackage> packages) { int newPackagesIndex = 0; int oldPackagesIndex = 0; while (oldPackagesIndex < _extensions.Count) { if (newPackagesIndex >= packages.Count) { _extensions.RemoveAt(oldPackagesIndex); } else { PackageItem currentOldItem = (PackageItem)_extensions[oldPackagesIndex]; if (PackageEqualityComparer.IdAndVersion.Equals(packages[newPackagesIndex], currentOldItem.PackageIdentity)) { newPackagesIndex++; oldPackagesIndex++; } else { _extensions.RemoveAt(oldPackagesIndex); } } } while (newPackagesIndex < packages.Count) { var extension = Provider.CreateExtension(packages[newPackagesIndex++]); if (extension != null) { _extensions.Add(extension); } } if (_extensions.Count > 0) { // select the first package by default ((IVsExtension)_extensions[0]).IsSelected = true; } } protected void OnNotifyPropertyChanged(string propertyName) { if (PropertyChanged != null) { PropertyChanged(this, new PropertyChangedEventArgs(propertyName)); } } public void SetProgressPane(IVsProgressPane progressPane) { ProgressPane = progressPane; } public void SetMessagePane(IVsMessagePane messagePane) { MessagePane = messagePane; } protected bool ShowProgressPane() { if (ProgressPane != null) { _progressPaneActive = true; return ProgressPane.Show(new CancelProgressCallback(CancelCurrentExtensionQuery), true); } else { return false; } } protected void HideProgressPane() { if (_progressPaneActive && ProgressPane != null) { ProgressPane.Close(); _progressPaneActive = false; } } protected bool ShowMessagePane(string message) { if (MessagePane != null) { MessagePane.SetMessageThreadSafe(message); return MessagePane.Show(); } else { return false; } } /// <summary> /// Called when this node is opened. /// </summary> internal void OnOpened() { if (!Provider.SuppressNextRefresh) { Provider.SelectedNode = this; if (!this.IsSearchResultsNode) { // If user switches back to this node, and the Include Prerelease combox box value // has changed, we need to reload the packages. // // This 'if' statement must come before the next one, so that we favor setting // 'resetQueryBeforeRefresh' to true. if (_includePrereleaseWhenLastLoaded != Provider.IncludePrerelease) { Refresh(resetQueryBeforeRefresh: true); return; } if (Provider.RefreshOnNodeSelection) { Refresh(); } } } } /// <summary> /// Called when thid focu switches away from this node /// </summary> internal virtual void OnClosed() { } } }
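// Illustrative sketch (standalone; not part of the dialog provider above, and the
// class name and sample data are hypothetical): the paging arithmetic used by
// ExecuteAsync and QueryExecutionCompleted. Ceiling division over PageSize gives the
// page count, and Skip/Take selects a single page; a short final page also corrects
// the total count, as the code above does.
using System;
using System.Linq;

static class PagingSketch
{
    public static void Demo()
    {
        const int pageSize = 10;
        var items = Enumerable.Range(1, 23).ToList(); // 23 items -> 3 pages

        int totalPages = (items.Count + pageSize - 1) / pageSize; // 3

        int pageNumber = 3; // the last, partial page
        var page = items.Skip((pageNumber - 1) * pageSize)
                        .Take(pageSize)
                        .ToList(); // items 21..23

        Console.WriteLine("{0} pages; page {1} holds {2} items", totalPages, pageNumber, page.Count);
    }
}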
// Copyright (c) The Avalonia Project. All rights reserved. // Licensed under the MIT license. See licence.md file in the project root for full license information. using System.Linq; using Avalonia.Controls.Presenters; using Avalonia.Controls.Templates; using Avalonia.Input; using Avalonia.LogicalTree; using Avalonia.Styling; using Avalonia.UnitTests; using Avalonia.VisualTree; using Xunit; using Avalonia.Collections; namespace Avalonia.Controls.UnitTests { public class ListBoxTests { [Fact] public void Should_Use_ItemTemplate_To_Create_Item_Content() { var target = new ListBox { Template = ListBoxTemplate(), Items = new[] { "Foo" }, ItemTemplate = new FuncDataTemplate<string>(_ => new Canvas()), }; Prepare(target); var container = (ListBoxItem)target.Presenter.Panel.Children[0]; Assert.IsType<Canvas>(container.Presenter.Child); } [Fact] public void ListBox_Should_Find_ItemsPresenter_In_ScrollViewer() { var target = new ListBox { Template = ListBoxTemplate(), }; Prepare(target); Assert.IsType<ItemsPresenter>(target.Presenter); } [Fact] public void ListBoxItem_Containers_Should_Be_Generated() { using (UnitTestApplication.Start(TestServices.MockPlatformRenderInterface)) { var items = new[] { "Foo", "Bar", "Baz " }; var target = new ListBox { Template = ListBoxTemplate(), Items = items, }; Prepare(target); var text = target.Presenter.Panel.Children .OfType<ListBoxItem>() .Select(x => x.Presenter.Child) .OfType<TextBlock>() .Select(x => x.Text) .ToList(); Assert.Equal(items, text); } } [Fact] public void LogicalChildren_Should_Be_Set_For_DataTemplate_Generated_Items() { using (UnitTestApplication.Start(TestServices.MockPlatformRenderInterface)) { var target = new ListBox { Template = ListBoxTemplate(), Items = new[] { "Foo", "Bar", "Baz " }, }; Prepare(target); Assert.Equal(3, target.GetLogicalChildren().Count()); foreach (var child in target.GetLogicalChildren()) { Assert.IsType<ListBoxItem>(child); } } } [Fact] public void DataContexts_Should_Be_Correctly_Set() { using (UnitTestApplication.Start(TestServices.MockPlatformRenderInterface)) { var items = new object[] { "Foo", new Item("Bar"), new TextBlock { Text = "Baz" }, new ListBoxItem { Content = "Qux" }, }; var target = new ListBox { Template = ListBoxTemplate(), DataContext = "Base", DataTemplates = new DataTemplates { new FuncDataTemplate<Item>(x => new Button { Content = x }) }, Items = items, }; Prepare(target); var dataContexts = target.Presenter.Panel.Children .Cast<Control>() .Select(x => x.DataContext) .ToList(); Assert.Equal( new object[] { items[0], items[1], "Base", "Base" }, dataContexts); } } [Fact] public void Selection_Should_Be_Cleared_On_Recycled_Items() { var target = new ListBox { Template = ListBoxTemplate(), Items = Enumerable.Range(0, 20).Select(x => $"Item {x}").ToList(), ItemTemplate = new FuncDataTemplate<string>(x => new TextBlock { Height = 10 }), SelectedIndex = 0, }; Prepare(target); // Make sure we're virtualized and first item is selected. Assert.Equal(10, target.Presenter.Panel.Children.Count); Assert.True(((ListBoxItem)target.Presenter.Panel.Children[0]).IsSelected); // Scroll down a page. target.Scroll.Offset = new Vector(0, 10); // Make sure recycled item isn't now selected. 
Assert.False(((ListBoxItem)target.Presenter.Panel.Children[0]).IsSelected); } private FuncControlTemplate ListBoxTemplate() { return new FuncControlTemplate<ListBox>(parent => new ScrollViewer { Name = "PART_ScrollViewer", Template = ScrollViewerTemplate(), Content = new ItemsPresenter { Name = "PART_ItemsPresenter", [~ItemsPresenter.ItemsProperty] = parent.GetObservable(ItemsControl.ItemsProperty).AsBinding(), [~ItemsPresenter.ItemsPanelProperty] = parent.GetObservable(ItemsControl.ItemsPanelProperty).AsBinding(), [~ItemsPresenter.VirtualizationModeProperty] = parent.GetObservable(ListBox.VirtualizationModeProperty).AsBinding(), } }); } private FuncControlTemplate ListBoxItemTemplate() { return new FuncControlTemplate<ListBoxItem>(parent => new ContentPresenter { Name = "PART_ContentPresenter", [!ContentPresenter.ContentProperty] = parent[!ListBoxItem.ContentProperty], [!ContentPresenter.ContentTemplateProperty] = parent[!ListBoxItem.ContentTemplateProperty], }); } private FuncControlTemplate ScrollViewerTemplate() { return new FuncControlTemplate<ScrollViewer>(parent => new ScrollContentPresenter { Name = "PART_ContentPresenter", [~ScrollContentPresenter.ContentProperty] = parent.GetObservable(ScrollViewer.ContentProperty).AsBinding(), [~~ScrollContentPresenter.ExtentProperty] = parent[~~ScrollViewer.ExtentProperty], [~~ScrollContentPresenter.OffsetProperty] = parent[~~ScrollViewer.OffsetProperty], [~~ScrollContentPresenter.ViewportProperty] = parent[~~ScrollViewer.ViewportProperty], }); } private void Prepare(ListBox target) { // The ListBox needs to be part of a rooted visual tree. var root = new TestRoot(); root.Child = target; // Apply the template to the ListBox itself. target.ApplyTemplate(); // Then to its inner ScrollViewer. var scrollViewer = (ScrollViewer)target.GetVisualChildren().Single(); scrollViewer.ApplyTemplate(); // Then make the ScrollViewer create its child. ((ContentPresenter)scrollViewer.Presenter).UpdateChild(); // Now the ItemsPresenter should be reigstered, so apply its template. target.Presenter.ApplyTemplate(); // Because ListBox items are virtualized we need to do a layout to make them appear. target.Measure(new Size(100, 100)); target.Arrange(new Rect(0, 0, 100, 100)); // Now set and apply the item templates. foreach (ListBoxItem item in target.Presenter.Panel.Children) { item.Template = ListBoxItemTemplate(); item.ApplyTemplate(); item.Presenter.ApplyTemplate(); ((ContentPresenter)item.Presenter).UpdateChild(); } // The items were created before the template was applied, so now we need to go back // and re-arrange everything. foreach (IControl i in target.GetSelfAndVisualDescendents()) { i.InvalidateMeasure(); } target.Arrange(new Rect(0, 0, 100, 100)); } private class Item { public Item(string value) { Value = value; } public string Value { get; } } } }
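// Hypothetical companion test (a sketch only, not part of the Avalonia suite above).
// It is shown in comment form because it would have to live inside the ListBoxTests
// class, as it reuses the private ListBoxTemplate() and Prepare() helpers; it checks
// that SelectedItem tracks SelectedIndex once the control tree has been prepared.
//
//     [Fact]
//     public void SelectedItem_Should_Track_SelectedIndex()
//     {
//         using (UnitTestApplication.Start(TestServices.MockPlatformRenderInterface))
//         {
//             var target = new ListBox
//             {
//                 Template = ListBoxTemplate(),
//                 Items = new[] { "Foo", "Bar", "Baz" },
//             };
//
//             Prepare(target);
//             target.SelectedIndex = 1;
//
//             Assert.Equal("Bar", target.SelectedItem);
//         }
//     }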
using System; using System.Threading.Tasks; using System.Web; using System.Net; using System.Text; using System.IO; using System.Threading; using System.Collections.Generic; using System.Security.Cryptography; using System.ComponentModel; using SteamBot.SteamGroups; using SteamKit2; using SteamTrade; using SteamKit2.Internal; using SteamTrade.TradeOffer; using System.Globalization; namespace SteamBot { public class Bot : IDisposable { #region Bot delegates public delegate UserHandler UserHandlerCreator(Bot bot, SteamID id); #endregion #region Private readonly variables private readonly SteamUser.LogOnDetails logOnDetails; private readonly string schemaLang; private readonly string logFile; private readonly Dictionary<SteamID, UserHandler> userHandlers; private readonly Log.LogLevel consoleLogLevel; private readonly Log.LogLevel fileLogLevel; private readonly UserHandlerCreator createHandler; private readonly bool isProccess; private readonly BackgroundWorker botThread; #endregion #region Private variables private Task<Inventory> myInventoryTask; private TradeManager tradeManager; private TradeOfferManager tradeOfferManager; private int tradePollingInterval; private string myUserNonce; private string myUniqueId; private bool cookiesAreInvalid = true; private List<SteamID> friends; private bool disposed = false; #endregion #region Public readonly variables /// <summary> /// Userhandler class bot is running. /// </summary> public readonly string BotControlClass; /// <summary> /// The display name of bot to steam. /// </summary> public readonly string DisplayName; /// <summary> /// The chat response from the config file. /// </summary> public readonly string ChatResponse; /// <summary> /// An array of admins for bot. /// </summary> public readonly IEnumerable<SteamID> Admins; public readonly SteamClient SteamClient; public readonly SteamUser SteamUser; public readonly SteamFriends SteamFriends; public readonly SteamTrading SteamTrade; public readonly SteamGameCoordinator SteamGameCoordinator; public readonly SteamNotifications SteamNotifications; /// <summary> /// The amount of time the bot will trade for. /// </summary> public readonly int MaximumTradeTime; /// <summary> /// The amount of time the bot will wait between user interactions with trade. /// </summary> public readonly int MaximumActionGap; /// <summary> /// The api key of bot. /// </summary> public readonly string ApiKey; public readonly SteamWeb SteamWeb; /// <summary> /// The prefix shown before bot's display name. /// </summary> public readonly string DisplayNamePrefix; /// <summary> /// The instance of the Logger for the bot. /// </summary> public readonly Log Log; #endregion #region Public variables public string AuthCode; public bool IsRunning; /// <summary> /// Is bot fully Logged in. /// Set only when bot did successfully Log in. /// </summary> public bool IsLoggedIn { get; private set; } /// <summary> /// The current trade the bot is in. /// </summary> public Trade CurrentTrade { get; private set; } /// <summary> /// The current game bot is in. /// Default: 0 = No game. /// </summary> public int CurrentGame { get; private set; } #endregion public IEnumerable<SteamID> FriendsList { get { CreateFriendsListIfNecessary(); return friends; } } public Inventory MyInventory { get { myInventoryTask.Wait(); return myInventoryTask.Result; } } /// <summary> /// Compatibility sanity. 
/// </summary> [Obsolete("Refactored to be Log instead of log")] public Log log { get { return Log; } } public Bot(Configuration.BotInfo config, string apiKey, UserHandlerCreator handlerCreator, bool debug = false, bool process = false) { userHandlers = new Dictionary<SteamID, UserHandler>(); logOnDetails = new SteamUser.LogOnDetails { Username = config.Username, Password = config.Password }; DisplayName = config.DisplayName; ChatResponse = config.ChatResponse; MaximumTradeTime = config.MaximumTradeTime; MaximumActionGap = config.MaximumActionGap; DisplayNamePrefix = config.DisplayNamePrefix; tradePollingInterval = config.TradePollingInterval <= 100 ? 800 : config.TradePollingInterval; schemaLang = config.SchemaLang != null && config.SchemaLang.Length == 2 ? config.SchemaLang.ToLower() : "en"; Admins = config.Admins; ApiKey = !String.IsNullOrEmpty(config.ApiKey) ? config.ApiKey : apiKey; isProccess = process; try { if( config.LogLevel != null ) { consoleLogLevel = (Log.LogLevel)Enum.Parse(typeof(Log.LogLevel), config.LogLevel, true); Console.WriteLine(@"(Console) LogLevel configuration parameter used in bot {0} is depreciated and may be removed in future versions. Please use ConsoleLogLevel instead.", DisplayName); } else consoleLogLevel = (Log.LogLevel)Enum.Parse(typeof(Log.LogLevel), config.ConsoleLogLevel, true); } catch (ArgumentException) { Console.WriteLine(@"(Console) ConsoleLogLevel invalid or unspecified for bot {0}. Defaulting to ""Info""", DisplayName); consoleLogLevel = Log.LogLevel.Info; } try { fileLogLevel = (Log.LogLevel)Enum.Parse(typeof(Log.LogLevel), config.FileLogLevel, true); } catch (ArgumentException) { Console.WriteLine(@"(Console) FileLogLevel invalid or unspecified for bot {0}. Defaulting to ""Info""", DisplayName); fileLogLevel = Log.LogLevel.Info; } logFile = config.LogFile; Log = new Log(logFile, DisplayName, consoleLogLevel, fileLogLevel); createHandler = handlerCreator; BotControlClass = config.BotControlClass; SteamWeb = new SteamWeb(); // Hacking around https ServicePointManager.ServerCertificateValidationCallback += SteamWeb.ValidateRemoteCertificate; Log.Debug ("Initializing Steam Bot..."); SteamClient = new SteamClient(); SteamClient.AddHandler(new SteamNotifications()); SteamTrade = SteamClient.GetHandler<SteamTrading>(); SteamUser = SteamClient.GetHandler<SteamUser>(); SteamFriends = SteamClient.GetHandler<SteamFriends>(); SteamGameCoordinator = SteamClient.GetHandler<SteamGameCoordinator>(); SteamNotifications = SteamClient.GetHandler<SteamNotifications>(); botThread = new BackgroundWorker { WorkerSupportsCancellation = true }; botThread.DoWork += BackgroundWorkerOnDoWork; botThread.RunWorkerCompleted += BackgroundWorkerOnRunWorkerCompleted; botThread.RunWorkerAsync(); } ~Bot() { Dispose(false); } private void CreateFriendsListIfNecessary() { if (friends != null) return; friends = new List<SteamID>(); for (int i = 0; i < SteamFriends.GetFriendCount(); i++) friends.Add(SteamFriends.GetFriendByIndex(i)); } /// <summary> /// Occurs when the bot needs the SteamGuard authentication code. /// </summary> /// <remarks> /// Return the code in <see cref="SteamGuardRequiredEventArgs.SteamGuard"/> /// </remarks> public event EventHandler<SteamGuardRequiredEventArgs> OnSteamGuardRequired; /// <summary> /// Starts the callback thread and connects to Steam via SteamKit2. /// </summary> /// <remarks> /// THIS NEVER RETURNS. /// </remarks> /// <returns><c>true</c>. 
See remarks</returns> public bool StartBot() { IsRunning = true; Log.Info("Connecting..."); if (!botThread.IsBusy) botThread.RunWorkerAsync(); SteamClient.Connect(); Log.Success("Done Loading Bot!"); return true; // never get here } /// <summary> /// Disconnect from the Steam network and stop the callback /// thread. /// </summary> public void StopBot() { IsRunning = false; Log.Debug("Trying to shut down bot thread."); SteamClient.Disconnect(); botThread.CancelAsync(); while (botThread.IsBusy) Thread.Yield(); userHandlers.Clear(); } /// <summary> /// Creates a new trade with the given partner. /// </summary> /// <returns> /// <c>true</c>, if trade was opened, /// <c>false</c> if there is another trade that must be closed first. /// </returns> public bool OpenTrade (SteamID other) { if (CurrentTrade != null || CheckCookies() == false) return false; SteamTrade.Trade(other); return true; } /// <summary> /// Closes the current active trade. /// </summary> public void CloseTrade() { if (CurrentTrade == null) return; UnsubscribeTrade (GetUserHandler (CurrentTrade.OtherSID), CurrentTrade); tradeManager.StopTrade (); CurrentTrade = null; } void OnTradeTimeout(object sender, EventArgs args) { // ignore event params and just null out the trade. GetUserHandler(CurrentTrade.OtherSID).OnTradeTimeout(); } /// <summary> /// Create a new trade offer with the specified partner /// </summary> /// <param name="other">SteamId of the partner</param> /// <returns></returns> public TradeOffer NewTradeOffer(SteamID other) { return tradeOfferManager.NewOffer(other); } /// <summary> /// Try to get a specific trade offer using the offerid /// </summary> /// <param name="offerId"></param> /// <param name="tradeOffer"></param> /// <returns></returns> public bool TryGetTradeOffer(string offerId, out TradeOffer tradeOffer) { return tradeOfferManager.GetOffer(offerId, out tradeOffer); } public void HandleBotCommand(string command) { try { GetUserHandler(SteamClient.SteamID).OnBotCommand(command); } catch (ObjectDisposedException e) { // Writing to console because odds are the error was caused by a disposed Log. Console.WriteLine(string.Format("Exception caught in BotCommand Thread: {0}", e)); if (!this.IsRunning) { Console.WriteLine("The Bot is no longer running and could not write to the Log. Try Starting this bot first."); } } catch (Exception e) { Console.WriteLine(string.Format("Exception caught in BotCommand Thread: {0}", e)); } } bool HandleTradeSessionStart (SteamID other) { if (CurrentTrade != null) return false; try { tradeManager.InitializeTrade(SteamUser.SteamID, other); CurrentTrade = tradeManager.CreateTrade(SteamUser.SteamID, other); CurrentTrade.OnClose += CloseTrade; SubscribeTrade(CurrentTrade, GetUserHandler(other)); tradeManager.StartTradeThread(CurrentTrade); return true; } catch (SteamTrade.Exceptions.InventoryFetchException) { // we shouldn't get here because the inv checks are also // done in the TradeProposedCallback handler. /*string response = String.Empty; if (ie.FailingSteamId.ConvertToUInt64() == other.ConvertToUInt64()) { response = "Trade failed. Could not correctly fetch your backpack. Either the inventory is inaccessible or your backpack is private."; } else { response = "Trade failed. 
Could not correctly fetch my backpack."; } SteamFriends.SendChatMessage(other, EChatEntryType.ChatMsg, response); Log.Info ("Bot sent other: {0}", response); CurrentTrade = null;*/ return false; } } public void SetGamePlaying(int id) { var gamePlaying = new SteamKit2.ClientMsgProtobuf<CMsgClientGamesPlayed>(EMsg.ClientGamesPlayed); if (id != 0) gamePlaying.Body.games_played.Add(new CMsgClientGamesPlayed.GamePlayed { game_id = new GameID(id), }); SteamClient.Send(gamePlaying); CurrentGame = id; } void HandleSteamMessage(ICallbackMsg msg) { Log.Debug(msg.ToString()); #region Login msg.Handle<SteamClient.ConnectedCallback> (callback => { Log.Debug ("Connection Callback: {0}", callback.Result); if (callback.Result == EResult.OK) { UserLogOn(); } else { Log.Error ("Failed to connect to Steam Community, trying again..."); SteamClient.Connect (); } }); msg.Handle<SteamUser.LoggedOnCallback> (callback => { Log.Debug("Logged On Callback: {0}", callback.Result); if (callback.Result == EResult.OK) { myUserNonce = callback.WebAPIUserNonce; } else { Log.Error("Login Error: {0}", callback.Result); } if (callback.Result == EResult.AccountLogonDenied) { Log.Interface ("This account is SteamGuard enabled. Enter the code via the `auth' command."); // try to get the steamguard auth code from the event callback var eva = new SteamGuardRequiredEventArgs(); FireOnSteamGuardRequired(eva); if (!String.IsNullOrEmpty(eva.SteamGuard)) logOnDetails.AuthCode = eva.SteamGuard; else logOnDetails.AuthCode = Console.ReadLine(); } if (callback.Result == EResult.InvalidLoginAuthCode) { Log.Interface("The given SteamGuard code was invalid. Try again using the `auth' command."); logOnDetails.AuthCode = Console.ReadLine(); } }); msg.Handle<SteamUser.LoginKeyCallback> (callback => { myUniqueId = callback.UniqueID.ToString(); UserWebLogOn(); if (Trade.CurrentSchema == null) { Log.Info ("Downloading Schema..."); Trade.CurrentSchema = Schema.FetchSchema (ApiKey, schemaLang); Log.Success ("Schema Downloaded!"); } SteamFriends.SetPersonaName (DisplayNamePrefix+DisplayName); SteamFriends.SetPersonaState (EPersonaState.Online); Log.Success ("Steam Bot Logged In Completely!"); GetUserHandler(SteamClient.SteamID).OnLoginCompleted(); }); msg.Handle<SteamUser.WebAPIUserNonceCallback>(webCallback => { Log.Debug("Received new WebAPIUserNonce."); if (webCallback.Result == EResult.OK) { myUserNonce = webCallback.Nonce; UserWebLogOn(); } else { Log.Error("WebAPIUserNonce Error: " + webCallback.Result); } }); msg.Handle<SteamUser.UpdateMachineAuthCallback>( authCallback => OnUpdateMachineAuthCallback(authCallback) ); #endregion #region Friends msg.Handle<SteamFriends.FriendsListCallback>(callback => { foreach (SteamFriends.FriendsListCallback.Friend friend in callback.FriendList) { switch (friend.SteamID.AccountType) { case EAccountType.Clan: if (friend.Relationship == EFriendRelationship.RequestRecipient) { if (GetUserHandler(friend.SteamID).OnGroupAdd()) { AcceptGroupInvite(friend.SteamID); } else { DeclineGroupInvite(friend.SteamID); } } break; default: CreateFriendsListIfNecessary(); if (friend.Relationship == EFriendRelationship.None) { friends.Remove(friend.SteamID); GetUserHandler(friend.SteamID).OnFriendRemove(); RemoveUserHandler(friend.SteamID); } else if (friend.Relationship == EFriendRelationship.RequestRecipient) { if (GetUserHandler(friend.SteamID).OnFriendAdd()) { if (!friends.Contains(friend.SteamID)) { friends.Add(friend.SteamID); } else { Log.Error("Friend was added who was already in friends list: " + friend.SteamID); } 
SteamFriends.AddFriend(friend.SteamID); } else { SteamFriends.RemoveFriend(friend.SteamID); RemoveUserHandler(friend.SteamID); } } break; } } }); msg.Handle<SteamFriends.FriendMsgCallback> (callback => { EChatEntryType type = callback.EntryType; if (callback.EntryType == EChatEntryType.ChatMsg) { Log.Info ("Chat Message from {0}: {1}", SteamFriends.GetFriendPersonaName (callback.Sender), callback.Message ); GetUserHandler(callback.Sender).OnMessageHandler(callback.Message, type); } }); #endregion #region Group Chat msg.Handle<SteamFriends.ChatMsgCallback>(callback => { GetUserHandler(callback.ChatterID).OnChatRoomMessage(callback.ChatRoomID, callback.ChatterID, callback.Message); }); #endregion #region Trading msg.Handle<SteamTrading.SessionStartCallback> (callback => { bool started = HandleTradeSessionStart (callback.OtherClient); if (!started) Log.Error ("Could not start the trade session."); else Log.Debug ("SteamTrading.SessionStartCallback handled successfully. Trade Opened."); }); msg.Handle<SteamTrading.TradeProposedCallback> (callback => { if (CheckCookies() == false) { SteamTrade.RespondToTrade(callback.TradeID, false); return; } try { tradeManager.InitializeTrade(SteamUser.SteamID, callback.OtherClient); } catch (WebException we) { SteamFriends.SendChatMessage(callback.OtherClient, EChatEntryType.ChatMsg, "Trade error: " + we.Message); SteamTrade.RespondToTrade(callback.TradeID, false); return; } catch (Exception) { SteamFriends.SendChatMessage(callback.OtherClient, EChatEntryType.ChatMsg, "Trade declined. Could not correctly fetch your backpack."); SteamTrade.RespondToTrade(callback.TradeID, false); return; } //if (tradeManager.OtherInventory.IsPrivate) //{ // SteamFriends.SendChatMessage(callback.OtherClient, // EChatEntryType.ChatMsg, // "Trade declined. Your backpack cannot be private."); // SteamTrade.RespondToTrade (callback.TradeID, false); // return; //} if (CurrentTrade == null && GetUserHandler (callback.OtherClient).OnTradeRequest ()) SteamTrade.RespondToTrade (callback.TradeID, true); else SteamTrade.RespondToTrade (callback.TradeID, false); }); msg.Handle<SteamTrading.TradeResultCallback> (callback => { if (callback.Response == EEconTradeResponse.Accepted) { Log.Debug("Trade Status: {0}", callback.Response); Log.Info ("Trade Accepted!"); GetUserHandler(callback.OtherClient).OnTradeRequestReply(true, callback.Response.ToString()); } else { Log.Warn("Trade failed: {0}", callback.Response); CloseTrade (); GetUserHandler(callback.OtherClient).OnTradeRequestReply(false, callback.Response.ToString()); } }); #endregion #region Disconnect msg.Handle<SteamUser.LoggedOffCallback> (callback => { IsLoggedIn = false; Log.Warn("Logged off Steam. 
Reason: {0}", callback.Result); }); msg.Handle<SteamClient.DisconnectedCallback> (callback => { if(IsLoggedIn) { IsLoggedIn = false; CloseTrade(); Log.Warn("Disconnected from Steam Network!"); } SteamClient.Connect (); }); #endregion #region Notifications msg.Handle<SteamBot.SteamNotifications.NotificationCallback>(callback => { //currently only appears to be of trade offer if (callback.Notifications.Count != 0) { foreach (var notification in callback.Notifications) { Log.Info(notification.UserNotificationType + " notification"); } } // Get offers only if cookies are valid if (CheckCookies()) tradeOfferManager.GetOffers(); }); msg.Handle<SteamBot.SteamNotifications.CommentNotificationCallback>(callback => { //various types of comment notifications on profile/activity feed etc //Log.Info("received CommentNotificationCallback"); //Log.Info("New Commments " + callback.CommentNotifications.CountNewComments); //Log.Info("New Commments Owners " + callback.CommentNotifications.CountNewCommentsOwner); //Log.Info("New Commments Subscriptions" + callback.CommentNotifications.CountNewCommentsSubscriptions); }); #endregion } void UserLogOn() { // get sentry file which has the machine hw info saved // from when a steam guard code was entered Directory.CreateDirectory(System.IO.Path.Combine(System.Windows.Forms.Application.StartupPath, "sentryfiles")); FileInfo fi = new FileInfo(System.IO.Path.Combine("sentryfiles",String.Format("{0}.sentryfile", logOnDetails.Username))); if (fi.Exists && fi.Length > 0) logOnDetails.SentryFileHash = SHAHash(File.ReadAllBytes(fi.FullName)); else logOnDetails.SentryFileHash = null; SteamUser.LogOn(logOnDetails); } void UserWebLogOn() { do { IsLoggedIn = SteamWeb.Authenticate(myUniqueId, SteamClient, myUserNonce); if(!IsLoggedIn) { Log.Warn("Authentication failed, retrying in 2s..."); Thread.Sleep(2000); } } while(!IsLoggedIn); Log.Success("User Authenticated!"); tradeManager = new TradeManager(ApiKey, SteamWeb); tradeManager.SetTradeTimeLimits(MaximumTradeTime, MaximumActionGap, tradePollingInterval); tradeManager.OnTimeout += OnTradeTimeout; tradeOfferManager = new TradeOfferManager(ApiKey, SteamWeb); SubscribeTradeOffer(tradeOfferManager); cookiesAreInvalid = false; // Success, check trade offers which we have received while we were offline tradeOfferManager.GetOffers(); } /// <summary> /// Checks if sessionId and token cookies are still valid. /// Sets cookie flag if they are invalid. /// </summary> /// <returns>true if cookies are valid; otherwise false</returns> bool CheckCookies() { // We still haven't re-authenticated if (cookiesAreInvalid) return false; try { if (!SteamWeb.VerifyCookies()) { // Cookies are no longer valid Log.Warn("Cookies are invalid. Need to re-authenticate."); cookiesAreInvalid = true; SteamUser.RequestWebAPIUserNonce(); return false; } } catch { // Even if exception is caught, we should still continue. Log.Warn("Cookie check failed. 
http://steamcommunity.com is possibly down."); } return true; } UserHandler GetUserHandler(SteamID sid) { if (!userHandlers.ContainsKey(sid)) userHandlers[sid] = createHandler(this, sid); return userHandlers[sid]; } void RemoveUserHandler(SteamID sid) { if (userHandlers.ContainsKey(sid)) userHandlers.Remove(sid); } static byte [] SHAHash (byte[] input) { SHA1Managed sha = new SHA1Managed(); byte[] output = sha.ComputeHash( input ); sha.Clear(); return output; } void OnUpdateMachineAuthCallback(SteamUser.UpdateMachineAuthCallback machineAuth) { byte[] hash = SHAHash (machineAuth.Data); Directory.CreateDirectory(System.IO.Path.Combine(System.Windows.Forms.Application.StartupPath, "sentryfiles")); File.WriteAllBytes (System.IO.Path.Combine("sentryfiles", String.Format("{0}.sentryfile", logOnDetails.Username)), machineAuth.Data); var authResponse = new SteamUser.MachineAuthDetails { BytesWritten = machineAuth.BytesToWrite, FileName = machineAuth.FileName, FileSize = machineAuth.BytesToWrite, Offset = machineAuth.Offset, SentryFileHash = hash, // should be the sha1 hash of the sentry file we just wrote OneTimePassword = machineAuth.OneTimePassword, // not sure on this one yet, since we've had no examples of steam using OTPs LastError = 0, // result from win32 GetLastError Result = EResult.OK, // if everything went okay, otherwise ~who knows~ JobID = machineAuth.JobID, // so we respond to the correct server job }; // send off our response SteamUser.SendMachineAuthResponse (authResponse); } /// <summary> /// Gets the bot's inventory and stores it in MyInventory. /// </summary> /// <example> This sample shows how to find items in the bot's inventory from a user handler. /// <code> /// Bot.GetInventory(); // Get the inventory first /// foreach (var item in Bot.MyInventory.Items) /// { /// if (item.Defindex == 5021) /// { /// // Bot has a key in its inventory /// } /// } /// </code> /// </example> public void GetInventory() { myInventoryTask = Task.Factory.StartNew((Func<Inventory>) FetchBotsInventory); } public void TradeOfferRouter(TradeOffer offer) { if (offer.OfferState == TradeOfferState.TradeOfferStateActive) { GetUserHandler(offer.PartnerSteamId).OnNewTradeOffer(offer); } } public void SubscribeTradeOffer(TradeOfferManager tradeOfferManager) { tradeOfferManager.OnNewTradeOffer += TradeOfferRouter; } //todo: should unsubscribe eventually... public void UnsubscribeTradeOffer(TradeOfferManager tradeOfferManager) { tradeOfferManager.OnNewTradeOffer -= TradeOfferRouter; } /// <summary> /// Subscribes all listeners of this to the trade. /// </summary> public void SubscribeTrade (Trade trade, UserHandler handler) { trade.OnSuccess += handler.OnTradeSuccess; trade.OnAwaitingEmailConfirmation += handler.OnTradeAwaitingEmailConfirmation; trade.OnClose += handler.OnTradeClose; trade.OnError += handler.OnTradeError; trade.OnStatusError += handler.OnStatusError; //trade.OnTimeout += OnTradeTimeout; trade.OnAfterInit += handler.OnTradeInit; trade.OnUserAddItem += handler.OnTradeAddItem; trade.OnUserRemoveItem += handler.OnTradeRemoveItem; trade.OnMessage += handler.OnTradeMessageHandler; trade.OnUserSetReady += handler.OnTradeReadyHandler; trade.OnUserAccept += handler.OnTradeAcceptHandler; } /// <summary> /// Unsubscribes all listeners of this from the current trade. 
/// </summary> public void UnsubscribeTrade (UserHandler handler, Trade trade) { trade.OnSuccess -= handler.OnTradeSuccess; trade.OnAwaitingEmailConfirmation -= handler.OnTradeAwaitingEmailConfirmation; trade.OnClose -= handler.OnTradeClose; trade.OnError -= handler.OnTradeError; trade.OnStatusError -= handler.OnStatusError; //Trade.OnTimeout -= OnTradeTimeout; trade.OnAfterInit -= handler.OnTradeInit; trade.OnUserAddItem -= handler.OnTradeAddItem; trade.OnUserRemoveItem -= handler.OnTradeRemoveItem; trade.OnMessage -= handler.OnTradeMessageHandler; trade.OnUserSetReady -= handler.OnTradeReadyHandler; trade.OnUserAccept -= handler.OnTradeAcceptHandler; } /// <summary> /// Fetch the Bot's inventory and log a warning if it's private /// </summary> private Inventory FetchBotsInventory() { var inventory = Inventory.FetchInventory(SteamUser.SteamID, ApiKey, SteamWeb); if(inventory.IsPrivate) { Log.Warn("The bot's backpack is private! If your bot adds any items it will fail! Your bot's backpack should be Public."); } return inventory; } #region Background Worker Methods private void BackgroundWorkerOnRunWorkerCompleted(object sender, RunWorkerCompletedEventArgs runWorkerCompletedEventArgs) { if (runWorkerCompletedEventArgs.Error != null) { Exception ex = runWorkerCompletedEventArgs.Error; Log.Error("Unhandled exception in bot {0} callback thread: {1} {2}", DisplayName, Environment.NewLine, ex); Log.Info("This bot died. Stopping it..."); //backgroundWorker.RunWorkerAsync(); //Thread.Sleep(10000); StopBot(); //StartBot(); } } private void BackgroundWorkerOnDoWork(object sender, DoWorkEventArgs doWorkEventArgs) { ICallbackMsg msg; while (!botThread.CancellationPending) { try { msg = SteamClient.WaitForCallback(true); HandleSteamMessage(msg); } catch (WebException e) { Log.Error("URI: {0} >> {1}", (e.Response != null && e.Response.ResponseUri != null ? e.Response.ResponseUri.ToString() : "unknown"), e.ToString()); System.Threading.Thread.Sleep(45000);//Steam is down, retry in 45 seconds.
} catch (Exception e) { Log.Error(e.ToString()); Log.Warn("Restarting bot..."); } } } #endregion Background Worker Methods private void FireOnSteamGuardRequired(SteamGuardRequiredEventArgs e) { // Set to null in case this is another attempt this.AuthCode = null; EventHandler<SteamGuardRequiredEventArgs> handler = OnSteamGuardRequired; if (handler != null) handler(this, e); else { while (true) { if (this.AuthCode != null) { e.SteamGuard = this.AuthCode; break; } Thread.Sleep(5); } } } #region Group Methods /// <summary> /// Accepts the invite to a Steam Group /// </summary> /// <param name="group">SteamID of the group to accept the invite from.</param> private void AcceptGroupInvite(SteamID group) { var AcceptInvite = new ClientMsg<CMsgGroupInviteAction>((int)EMsg.ClientAcknowledgeClanInvite); AcceptInvite.Body.GroupID = group.ConvertToUInt64(); AcceptInvite.Body.AcceptInvite = true; this.SteamClient.Send(AcceptInvite); } /// <summary> /// Declines the invite to a Steam Group /// </summary> /// <param name="group">SteamID of the group to decline the invite from.</param> private void DeclineGroupInvite(SteamID group) { var DeclineInvite = new ClientMsg<CMsgGroupInviteAction>((int)EMsg.ClientAcknowledgeClanInvite); DeclineInvite.Body.GroupID = group.ConvertToUInt64(); DeclineInvite.Body.AcceptInvite = false; this.SteamClient.Send(DeclineInvite); } /// <summary> /// Invites a user to the specified Steam Group /// </summary> /// <param name="user">SteamID of the user to invite.</param> /// <param name="groupId">SteamID of the group to invite the user to.</param> public void InviteUserToGroup(SteamID user, SteamID groupId) { var InviteUser = new ClientMsg<CMsgInviteUserToGroup>((int)EMsg.ClientInviteUserToClan); InviteUser.Body.GroupID = groupId.ConvertToUInt64(); InviteUser.Body.Invitee = user.ConvertToUInt64(); InviteUser.Body.UnknownInfo = true; this.SteamClient.Send(InviteUser); } #endregion public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } private void Dispose(bool disposing) { if (disposed) return; StopBot(); if (disposing) Log.Dispose(); disposed = true; } } }
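// Usage sketch (illustrative, not part of the original source): how a host might drive the
// Bot class above. Assumes a Configuration.BotInfo instance loaded elsewhere (e.g. from the
// bot's settings file) and a hypothetical MyUserHandler subclass of UserHandler; only the
// constructor signature and StartBot/StopBot/Dispose shown in this file are relied on.
//
//     UserHandlerCreator creator = (bot, sid) => new MyUserHandler(bot, sid);
//     var bot = new Bot(config, apiKey, creator);
//     bot.StartBot();      // starts the background callback worker and returns immediately
//     // ... run until shutdown is requested ...
//     bot.StopBot();       // disconnects from Steam and stops the callback worker
//     bot.Dispose();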
// // Copyright (C) DataStax Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // using System; using System.Collections; using System.Collections.Generic; using System.Globalization; using System.Linq; using System.Runtime.Serialization; using System.Text.RegularExpressions; using Newtonsoft.Json; using Newtonsoft.Json.Linq; namespace Cassandra.Geometry { /// <summary> /// Represents a plane geometry figure that is bounded by a finite chain of straight line segments closing in a /// loop to form a closed chain or circuit. /// </summary> [Serializable] [JsonConverter(typeof(PolygonJsonConverter))] public class Polygon : GeometryBase { private static readonly Regex WktRegex = new Regex( @"^POLYGON ?\((\(.*\))\)$", RegexOptions.Compiled); private IList<IList<Point>> _ringsWithOrderedPoints; /// <summary> /// A read-only list describing the rings of the polygon. /// </summary> public IList<IList<Point>> Rings { get; private set; } /// <inheritdoc /> protected override IEnumerable GeoCoordinates { get { return Rings.Select(r => r.Select(p => new[] { p.X, p.Y })); } } /// <summary> /// Creates a new instance of <see cref="Polygon"/> with a single ring. /// </summary> /// <param name="points">The points of the single ring</param> public Polygon(params Point[] points) : this((IList<IList<Point>>) new[] { (IList<Point>)points }) { } /// <summary> /// Creates a new instance of <see cref="Polygon"/> with a sequence of rings. /// </summary> /// <param name="points">The rings of the polygon, each represented as a list of points</param> public Polygon(params IList<Point>[] points) : this((IList<IList<Point>>) points) { } /// <summary> /// Creates a new instance of <see cref="Polygon"/> with no rings (empty). /// </summary> public Polygon() : this((IList<IList<Point>>) new IList<Point>[0]) { } /// <summary> /// Creates a new instance of <see cref="Polygon"/> using multiple rings. /// </summary> /// <param name="rings">The polygon rings</param> public Polygon(IList<IList<Point>> rings) { if (rings == null) { throw new ArgumentNullException("rings"); } Rings = AsReadOnlyCollection(rings, r => AsReadOnlyCollection(r)); _ringsWithOrderedPoints = Rings.Select(r => (IList<Point>) r.OrderBy(p => p).ToList()).ToList(); } /// <summary> /// Creates a new instance of <see cref="Polygon"/> using serialization information.
/// </summary> protected Polygon(SerializationInfo info, StreamingContext context) { var coordinates = (double[][][])info.GetValue("coordinates", typeof(double[][][])); Rings = AsReadOnlyCollection(coordinates .Select(r => (IList<Point>)r.Select(p => new Point(p[0], p[1])).ToList()) .ToList()); _ringsWithOrderedPoints = Rings.Select(r => (IList<Point>) r.OrderBy(p => p).ToList()).ToList(); } internal Polygon(JObject obj) { var coordinates = obj.GetValue("coordinates").ToObject<double[][][]>(); Rings = AsReadOnlyCollection(coordinates .Select(r => (IList<Point>)r.Select(p => new Point(p[0], p[1])).ToList()) .ToList()); _ringsWithOrderedPoints = Rings.Select(r => (IList<Point>) r.OrderBy(p => p).ToList()).ToList(); } /// <summary> /// Returns a value indicating whether this instance and a specified object represent the same value. /// </summary> public override bool Equals(object obj) { var other = obj as Polygon; if (other == null) { return false; } if (Rings.Count != other.Rings.Count) { return false; } for (var i = 0; i < Rings.Count; i++) { var r1 = _ringsWithOrderedPoints[i]; var r2 = other._ringsWithOrderedPoints[i]; if (!r1.SequenceEqual(r2)) { return false; } } return true; } /// <summary> /// Returns the hash code based on the value of this instance. /// </summary> public override int GetHashCode() { // ReSharper disable once NonReadonlyMemberInGetHashCode return CombineHashCode(_ringsWithOrderedPoints.Select(r => CombineHashCode(r.Select(p => p.GetHashCode())))); } /// <summary> /// Returns Well-known text (WKT) representation of the geometry object. /// </summary> public override string ToString() { if (Rings.Count == 0) { return "POLYGON EMPTY"; } return string.Format("POLYGON ({0})", string.Join(", ", Rings.Select(r => "(" + string.Join(", ", r.Select(p => p.X.ToString(CultureInfo.InvariantCulture) + " " + p.Y.ToString(CultureInfo.InvariantCulture))) + ")"))); } /// <summary> /// Creates a <see cref="Polygon"/> instance from a /// <see href="https://en.wikipedia.org/wiki/Well-known_text">Well-known Text(WKT)</see> /// representation of a polygon. /// </summary> public static Polygon Parse(string textValue) { if (textValue == null) { throw new ArgumentNullException("textValue"); } if (textValue == "POLYGON EMPTY") { return new Polygon(); } Action<bool> validateWkt = condition => { if (condition) { throw InvalidFormatException(textValue); } }; var match = WktRegex.Match(textValue); validateWkt(!match.Success || match.Groups.Count != 2); var ringsText = match.Groups[1].Value; var ringsArray = new LinkedList<string>(); var ringStart = -1; for (var i = 0; i < ringsText.Length; i++) { var c = ringsText[i]; if (c == '(') { validateWkt(ringStart != -1); ringStart = i + 1; continue; } if (c == ')') { validateWkt(ringStart == -1); ringsArray.AddLast(ringsText.Substring(ringStart, i - ringStart)); ringStart = -1; continue; } validateWkt(ringStart == -1 && c != ' ' && c != ','); } var lines = ringsArray.Select(r => (IList<Point>)LineString.ParseSegments(r)).ToList(); return new Polygon(lines); } } }
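// Usage sketch (illustrative, not part of the original source): constructing a single-ring
// Polygon, round-tripping it through its WKT form, and comparing. Only members defined in
// this file (the Point-based constructor, ToString, Parse and Equals) are used; the
// coordinate values are arbitrary.
//
//     var polygon = new Polygon(new Point(0, 0), new Point(0, 3), new Point(4, 3), new Point(0, 0));
//     var wkt = polygon.ToString();          // e.g. "POLYGON ((0 0, 0 3, 4 3, 0 0))"
//     var parsed = Polygon.Parse(wkt);       // parses the WKT back into rings of points
//     var equal = polygon.Equals(parsed);    // true: rings are compared point by point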
//---------------------------------------------------------------- // Copyright (c) Microsoft Corporation. All rights reserved. //---------------------------------------------------------------- namespace System.Activities.Presentation.View { using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Collections.Specialized; using System.ComponentModel; using System.Globalization; using System.Linq; using System.Windows; using System.Windows.Automation.Peers; using System.Windows.Controls; using System.Windows.Data; using System.Windows.Input; using System.Runtime; using System.Windows.Automation; using System.Diagnostics.CodeAnalysis; using System.Windows.Threading; using System.Activities.Presentation.Hosting; using Microsoft.Activities.Presentation; // This control presents a System.Type as textblock, which is editable on click, or F2. public sealed partial class TypePresenter : ContentControl, INotifyPropertyChanged { public static readonly DependencyProperty ContextProperty = DependencyProperty.Register("Context", typeof(EditingContext), typeof(TypePresenter), new PropertyMetadata(new PropertyChangedCallback(OnContextChanged))); public static readonly DependencyProperty AllowNullProperty = DependencyProperty.Register("AllowNull", typeof(bool), typeof(TypePresenter), new PropertyMetadata(false, OnAllowNullChanged)); public static readonly DependencyProperty BrowseTypeDirectlyProperty = DependencyProperty.Register("BrowseTypeDirectly", typeof(bool), typeof(TypePresenter), new PropertyMetadata(false, OnBrowseTypeDirectlyChanged)); public static readonly DependencyProperty TypeProperty = DependencyProperty.Register("Type", typeof(Type), typeof(TypePresenter), new PropertyMetadata(null, new PropertyChangedCallback(OnTypeChanged))); public static readonly DependencyProperty LabelProperty = DependencyProperty.Register("Label", typeof(string), typeof(TypePresenter), new PropertyMetadata(string.Empty)); public static readonly DependencyProperty FilterProperty = DependencyProperty.Register("Filter", typeof(Func<Type, bool>), typeof(TypePresenter), new PropertyMetadata(new PropertyChangedCallback(OnFilterChanged))); static readonly DependencyPropertyKey TextPropertyKey = DependencyProperty.RegisterReadOnly( "Text", typeof(string), typeof(TypePresenter), new UIPropertyMetadata(null)); public static readonly DependencyProperty TextProperty = TextPropertyKey.DependencyProperty; public static readonly DependencyProperty MostRecentlyUsedTypesProperty = DependencyProperty.Register("MostRecentlyUsedTypes", typeof(ObservableCollection<Type>), typeof(TypePresenter), new PropertyMetadata(TypePresenter.DefaultMostRecentlyUsedTypes, new PropertyChangedCallback(OnMostRecentlyUsedTypesPropertyChanged), new CoerceValueCallback(OnCoerceMostRecentlyUsedTypes))); public static readonly DependencyProperty CenterActivityTypeResolverDialogProperty = DependencyProperty.Register("CenterActivityTypeResolverDialog", typeof(bool), typeof(TypePresenter), new PropertyMetadata(true)); public static readonly DependencyProperty CenterTypeBrowserDialogProperty = DependencyProperty.Register("CenterTypeBrowserDialog", typeof(bool), typeof(TypePresenter), new PropertyMetadata(true)); public static readonly RoutedEvent TypeBrowserOpenedEvent = EventManager.RegisterRoutedEvent( "TypeBrowserOpened", RoutingStrategy.Bubble, typeof(RoutedEventHandler), typeof(TypePresenter)); public static readonly RoutedEvent TypeBrowserClosedEvent = EventManager.RegisterRoutedEvent( "TypeBrowserClosed", 
RoutingStrategy.Bubble, typeof(RoutedEventHandler), typeof(TypePresenter)); public static readonly RoutedEvent TypeChangedEvent = EventManager.RegisterRoutedEvent( "TypeChanged", RoutingStrategy.Bubble, typeof(RoutedEventHandler), typeof(TypePresenter)); static List<Type> defaultTypes = null; static ObservableCollection<Type> defaultMostRecentlyUsedTypes; internal static List<Type> DefaultTypes { get { if (defaultTypes == null) { defaultTypes = new List<Type> { typeof(Boolean), typeof(Int32), typeof(String), typeof(Object), }; } return defaultTypes; } } public static ObservableCollection<Type> DefaultMostRecentlyUsedTypes { get { if (defaultMostRecentlyUsedTypes == null) { defaultMostRecentlyUsedTypes = new ObservableCollection<Type>(DefaultTypes); } return defaultMostRecentlyUsedTypes; } } [SuppressMessage(FxCop.Category.Usage, FxCop.Rule.CollectionPropertiesShouldBeReadOnly, Justification = "Setter is provided to bind data on this property.")] [Fx.Tag.KnownXamlExternal] public ObservableCollection<Type> MostRecentlyUsedTypes { get { return (ObservableCollection<Type>)GetValue(MostRecentlyUsedTypesProperty); } set { SetValue(MostRecentlyUsedTypesProperty, value); } } bool isMouseLeftButtonDown = true; Type lastSelection; TypeWrapper nullTypeWrapper = null; public TypePresenter() { InitializeComponent(); OnBrowseTypeDirectlyChanged(this, new DependencyPropertyChangedEventArgs( TypePresenter.BrowseTypeDirectlyProperty, false, this.BrowseTypeDirectly)); DisableEdit(); this.typeComboBox.DropDownClosed += OnTypePresenterDropDownClosed; this.typeComboBox.PreviewLostKeyboardFocus += OnTypePresenterComboBoxPreviewLostKeyboardFocus; this.typeComboBox.LostFocus += OnTypePresenterComboBoxLostFocus; this.typeComboBox.KeyDown += OnTypePresenterKeyDown; Binding textToType = new Binding(); textToType.Converter = new TypeWrapperConverter(this); textToType.Source = this; textToType.Path = new PropertyPath(TypeProperty); this.typeComboBox.SetBinding(ComboBox.SelectedItemProperty, textToType); this.lastSelection = (Type)TypeProperty.DefaultMetadata.DefaultValue; MultiBinding automationNameBinding = new MultiBinding(); Binding labelBinding = new Binding("Label"); labelBinding.Source = this; automationNameBinding.Bindings.Add(labelBinding); Binding typeBinding = new Binding("Text"); typeBinding.Source = this.typeTextBlock; automationNameBinding.Bindings.Add(typeBinding); automationNameBinding.Converter = new AutomationNameConverter(); this.SetBinding(AutomationProperties.NameProperty, automationNameBinding); this.Loaded += new RoutedEventHandler(TypePresenter_Loaded); this.Unloaded += new RoutedEventHandler(TypePresenter_Unloaded); } void OnTypePresenterComboBoxPreviewLostKeyboardFocus(object sender, KeyboardFocusChangedEventArgs e) { if (this.typeComboBox.Visibility == Visibility.Visible && this.typeComboBox.IsDropDownOpen) { e.Handled = true; } } void OnTypePresenterComboBoxLostFocus(object sender, RoutedEventArgs e) { TypeWrapper tw = (TypeWrapper)this.typeComboBox.SelectedItem; if (tw != null) { if (tw.Type == typeof(ArrayOf<>) || tw.Type == typeof(BrowseForType)) { SetComboBoxToLastSelection(); } } } void SetComboBoxToLastSelection() { if (this.lastSelection == null) { this.typeComboBox.SelectedIndex = this.typeComboBox.Items.IndexOf(this.NullTypeWrapper); } else { for (int i = 0; i < this.typeComboBox.Items.Count; i++) { TypeWrapper typeWrapper = (TypeWrapper)this.typeComboBox.Items.GetItemAt(i); if (typeWrapper.IsTypeDefinition && Type.Equals(this.lastSelection, typeWrapper.Type)) { 
this.typeComboBox.SelectedIndex = i; break; } } } } public void FocusOnVisibleControl() { if (BrowseTypeDirectly) { this.Dispatcher.BeginInvoke(DispatcherPriority.ApplicationIdle, (Action)(() => { Keyboard.Focus(this.typeTextBlock); })); } else { this.Dispatcher.BeginInvoke(DispatcherPriority.ApplicationIdle, (Action)(() => { Keyboard.Focus(this.typeComboBox); })); } } void TypePresenter_Loaded(object sender, RoutedEventArgs e) { //UnRegistering because of 137896: Inside tab control multiple Loaded events happen without an Unloaded event. this.MostRecentlyUsedTypes.CollectionChanged -= OnMostRecentlyUsedTypesChanged; this.MostRecentlyUsedTypes.CollectionChanged += OnMostRecentlyUsedTypesChanged; OnMostRecentlyUsedTypesChanged(this, null); } void TypePresenter_Unloaded(object sender, RoutedEventArgs e) { this.MostRecentlyUsedTypes.CollectionChanged -= OnMostRecentlyUsedTypesChanged; } public event RoutedEventHandler TypeBrowserOpened { add { this.AddHandler(TypeBrowserOpenedEvent, value); } remove { this.RemoveHandler(TypeBrowserOpenedEvent, value); } } public event RoutedEventHandler TypeBrowserClosed { add { this.AddHandler(TypeBrowserClosedEvent, value); } remove { this.RemoveHandler(TypeBrowserClosedEvent, value); } } public event RoutedEventHandler TypeChanged { add { this.AddHandler(TypeChangedEvent, value); } remove { this.RemoveHandler(TypeChangedEvent, value); } } public event PropertyChangedEventHandler PropertyChanged; [Fx.Tag.KnownXamlExternal] public EditingContext Context { get { return (EditingContext)GetValue(ContextProperty); } set { SetValue(ContextProperty, value); } } public bool AllowNull { get { return (bool)GetValue(AllowNullProperty); } set { SetValue(AllowNullProperty, value); } } public string Label { get { return (string)GetValue(LabelProperty); } set { SetValue(LabelProperty, value); } } [Fx.Tag.KnownXamlExternal] public Func<Type, bool> Filter { get { return (Func<Type, bool>)GetValue(FilterProperty); } set { SetValue(FilterProperty, value); } } public bool CenterActivityTypeResolverDialog { get { return (bool)GetValue(CenterActivityTypeResolverDialogProperty); } set { SetValue(CenterActivityTypeResolverDialogProperty, value); } } public bool CenterTypeBrowserDialog { get { return (bool)GetValue(CenterTypeBrowserDialogProperty); } set { SetValue(CenterTypeBrowserDialogProperty, value); } } internal TypeWrapper NullTypeWrapper { get { if (this.nullTypeWrapper == null) { this.nullTypeWrapper = new TypeWrapper(NullString, "Null", null); } return this.nullTypeWrapper; } } public string Text { get { return (string)GetValue(TextProperty); } private set { SetValue(TextPropertyKey, value); } } public IEnumerable<TypeWrapper> Items { get { if (AllowNull) { yield return this.NullTypeWrapper; } foreach (Type type in this.MostRecentlyUsedTypes) { if (type != null) { if (this.Filter == null || this.Filter(type)) { yield return new TypeWrapper(type); } } } //display Array of [T] option if (this.Filter == null || this.Filter(typeof(Array))) { yield return new TypeWrapper("Array of [T]", "T[]", typeof(ArrayOf<>)); } //display "Browse for types" option //if there are referenced and local assembly info in Editing context (inside VS), type browser will show those assemblies, //otherwise (standalone), type browser will just show all loaded assemblies in current appdomain yield return new TypeWrapper(BrowseTypeString, "BrowseForTypes", typeof(BrowseForType)); } } public bool BrowseTypeDirectly { get { return (bool)GetValue(BrowseTypeDirectlyProperty); } set { 
SetValue(BrowseTypeDirectlyProperty, value); } } [SuppressMessage("Microsoft.Naming", "CA1721:PropertyNamesShouldNotMatchGetMethods", Justification = "By design.")] public Type Type { get { return (Type)GetValue(TypeProperty); } set { SetValue(TypeProperty, value); } } public string TypeName { get { string typeName = string.Empty; this.ToolTip = null; if (null != this.Type) { typeName = ResolveTypeName(this.Type); this.ToolTip = typeName; } return typeName; } } internal static string ResolveTypeName(Type type) { Fx.Assert(type != null, "parameter type is null!"); string typeName; if (TypePresenter.DefaultTypes.Contains(type)) { typeName = type.Name; } else { typeName = TypeNameHelper.GetDisplayName(type, true); } return typeName; } AssemblyContextControlItem AssemblyContext { get { return (null != Context ? Context.Items.GetValue<AssemblyContextControlItem>() : null); } } string BrowseTypeString { get { return (string)this.FindResource("BrowseTypeString"); } } string NullString { get { return "(null)"; } } protected override System.Windows.Automation.Peers.AutomationPeer OnCreateAutomationPeer() { return new UIElementAutomationPeer(this); } protected override void OnMouseLeftButtonDown(MouseButtonEventArgs e) { base.OnMouseLeftButtonDown(e); this.isMouseLeftButtonDown = true; e.Handled = true; } protected override void OnMouseLeftButtonUp(MouseButtonEventArgs e) { base.OnMouseLeftButtonUp(e); if (this.isMouseLeftButtonDown) { if (this.BrowseTypeDirectly) { HandleBrowseType(); } else { this.EnableEdit(); } } this.isMouseLeftButtonDown = false; e.Handled = true; } protected override void OnPreviewKeyDown(KeyEventArgs e) { base.OnPreviewKeyDown(e); if (IsPreviewKey(e.Key)) { Preview(); } } internal static bool IsPreviewKey(Key key) { return (key == Key.F2 || key == Key.Space || key == Key.Enter); } internal void Preview() { if (this.BrowseTypeDirectly) { HandleBrowseType(); } else { this.EnableEdit(); } } static void OnContextChanged(DependencyObject sender, DependencyPropertyChangedEventArgs args) { TypePresenter ctrl = (TypePresenter)sender; ctrl.OnItemsChanged(); } static void OnAllowNullChanged(DependencyObject sender, DependencyPropertyChangedEventArgs e) { TypePresenter ctrl = (TypePresenter)sender; ctrl.OnItemsChanged(); } static void OnBrowseTypeDirectlyChanged(DependencyObject sender, DependencyPropertyChangedEventArgs args) { TypePresenter ctrl = (TypePresenter)sender; if (!(bool)args.NewValue) { ctrl.typeComboBox.Visibility = Visibility.Visible; ctrl.typeTextBlock.Visibility = Visibility.Collapsed; ctrl.Focusable = false; } else { ctrl.typeComboBox.Visibility = Visibility.Collapsed; ctrl.typeTextBlock.Visibility = Visibility.Visible; ctrl.Focusable = true; } } static void OnTypeChanged(DependencyObject sender, DependencyPropertyChangedEventArgs args) { TypePresenter ctrl = (TypePresenter)sender; ctrl.lastSelection = (Type)args.NewValue; if (null != ctrl.PropertyChanged) { ctrl.PropertyChanged(ctrl, new PropertyChangedEventArgs("TypeName")); } if (null == ctrl.lastSelection) { ctrl.typeComboBox.SelectedIndex = ctrl.typeComboBox.Items.IndexOf(ctrl.NullTypeWrapper); } ctrl.Text = ctrl.TypeName; ctrl.RaiseEvent(new RoutedEventArgs(TypePresenter.TypeChangedEvent, ctrl)); } static void OnFilterChanged(DependencyObject sender, DependencyPropertyChangedEventArgs args) { TypePresenter ctrl = (TypePresenter)sender; if (null != ctrl.PropertyChanged) { ctrl.PropertyChanged(ctrl, new PropertyChangedEventArgs("Items")); } } static void OnMostRecentlyUsedTypesPropertyChanged(DependencyObject 
sender, DependencyPropertyChangedEventArgs args) { TypePresenter ctrl = (TypePresenter)sender; ((ObservableCollection<Type>)args.NewValue).CollectionChanged += ctrl.OnMostRecentlyUsedTypesChanged; ((ObservableCollection<Type>)args.OldValue).CollectionChanged -= ctrl.OnMostRecentlyUsedTypesChanged; ctrl.OnItemsChanged(); } static object OnCoerceMostRecentlyUsedTypes(DependencyObject sender, object value) { if (value != null) { return value; } else { return TypePresenter.DefaultMostRecentlyUsedTypes; } } void DisableEdit() { if (BrowseTypeDirectly) { this.typeTextBlock.Visibility = Visibility.Visible; this.typeComboBox.Visibility = Visibility.Collapsed; } } void EnableEdit() { if (BrowseTypeDirectly) { this.typeTextBlock.Visibility = Visibility.Collapsed; this.typeComboBox.Visibility = Visibility.Visible; } this.typeComboBox.Focus(); } // return true if KeyDownEvent should be set to handled bool HandleBrowseType() { bool retval = false; TypeWrapper wrapper = (TypeWrapper)this.typeComboBox.SelectedItem; if ((wrapper != null && !wrapper.IsTypeDefinition) || this.BrowseTypeDirectly) { Type result = null; bool? dialogResult = true; bool typeIsArray = true; bool fireEvent = false; //handle choosing an array of T if (wrapper != null && typeof(ArrayOf<>) == wrapper.Type) { fireEvent = true; this.RaiseEvent(new RoutedEventArgs(TypePresenter.TypeBrowserOpenedEvent, this)); result = wrapper.Type; } else if (wrapper != null && wrapper.DisplayName == NullString) { this.Type = null; return false; } else { retval = true; fireEvent = true; this.RaiseEvent(new RoutedEventArgs(TypePresenter.TypeBrowserOpenedEvent, this)); TypeBrowser browser = new TypeBrowser(AssemblyContext, this.Context, this.Filter); SetWindowOwner(browser); if (this.CenterTypeBrowserDialog) { browser.WindowStartupLocation = WindowStartupLocation.CenterScreen; } dialogResult = browser.ShowDialog(); if (dialogResult.HasValue && dialogResult.Value) { result = browser.ConcreteType; } typeIsArray = false; } if (dialogResult.HasValue && dialogResult.Value) { //user may have chosen generic type (IList) if (result.IsGenericTypeDefinition) { retval = true; ActivityTypeResolver wnd = new ActivityTypeResolver(); SetWindowOwner(wnd); wnd.Context = this.Context; wnd.EditedType = result; if (this.CenterActivityTypeResolverDialog) { wnd.WindowStartupLocation = WindowStartupLocation.CenterScreen; } result = (true == wnd.ShowDialog() ? 
wnd.ConcreteType : null); } //if we have a type if (null != result) { //if we have a ArrayOf<some type here>, create actual array type if (typeIsArray) { result = result.GetGenericArguments()[0].MakeArrayType(); } //add it to the cache if (!MostRecentlyUsedTypes.Any<Type>(p => Type.Equals(p, result))) { MostRecentlyUsedTypes.Add(result); } //and return updated result this.Type = result; } else { this.Type = this.lastSelection; } BindingExpression binding = this.typeComboBox.GetBindingExpression(ComboBox.SelectedItemProperty); binding.UpdateTarget(); } else { SetComboBoxToLastSelection(); } if (fireEvent) { this.RaiseEvent(new RoutedEventArgs(TypePresenter.TypeBrowserClosedEvent, this)); } } return retval; } void OnMostRecentlyUsedTypesChanged(object sender, NotifyCollectionChangedEventArgs e) { OnItemsChanged(); } void OnItemsChanged() { if (null != PropertyChanged) { PropertyChanged(this, new PropertyChangedEventArgs("Items")); } } void OnTypePresenterDropDownClosed(object sender, EventArgs e) { HandleBrowseType(); DisableEdit(); if (!this.BrowseTypeDirectly) { this.typeComboBox.Focus(); } else { this.Focus(); } } void OnTypePresenterKeyDown(object sender, KeyEventArgs e) { if (e.Key == Key.Enter) { if (HandleBrowseType()) { e.Handled = true; } DisableEdit(); FocusOnVisibleControl(); } } void OnTypePresenterLostKeyboardFocus(object sender, KeyboardFocusChangedEventArgs e) { if (!(e.NewFocus == this)) { if (!(this.typeComboBox.IsDropDownOpen || this.typeComboBox.IsSelectionBoxHighlighted)) { DisableEdit(); } } } void SetWindowOwner(Window wnd) { WindowHelperService.TrySetWindowOwner(this, this.Context, wnd); } // internal converter class - assign a meaningful AutomationProperties.Name to the type presenter // AutomationProperties.Name = Label + the string displayed on the TypePresenter sealed class AutomationNameConverter : IMultiValueConverter { public object Convert(object[] values, Type targetType, object parameter, CultureInfo culture) { Fx.Assert(values.Length == 2, "There should be exactly 2 values"); return (string)values[0] + ": " + (string)values[1]; } public object[] ConvertBack(object value, Type[] targetTypes, object parameter, CultureInfo culture) { Fx.Assert("Not supported!"); return null; } } } [Fx.Tag.XamlVisible(false)] public sealed class TypeWrapper { string displayName; bool isTypeDefinition; Type type; internal TypeWrapper(Type type) { this.type = type; this.isTypeDefinition = true; this.Tag = DisplayName; } internal TypeWrapper(string text, string tag, Type type) { this.displayName = text; this.isTypeDefinition = false; this.Tag = tag; this.type = type; } public string DisplayName { get { if (this.isTypeDefinition) { if (TypePresenter.DefaultTypes.Contains(this.type)) { return this.type.Name; } return TypeNameHelper.GetDisplayName(this.Type, true); } return this.displayName; } } public bool IsTypeDefinition { get { return this.isTypeDefinition; } } public object Tag { get; private set; } [SuppressMessage("Microsoft.Naming", "CA1721:PropertyNamesShouldNotMatchGetMethods", Justification = "By design.")] public Type Type { get { return this.type; } } public override string ToString() { return Tag as string; } public override bool Equals(object obj) { TypeWrapper that = obj as TypeWrapper; if (that == null) { return false; } if (that.IsTypeDefinition ^ this.IsTypeDefinition) { return false; } if (this.displayName != that.displayName) { return false; } return object.Equals(this.Type, that.Type); } public override int GetHashCode() { if (this.Type != null) { return 
this.Type.GetHashCode(); } else { return base.GetHashCode(); } } } sealed class ArrayOf<T> { } sealed class BrowseForType { } // internal converter class - keeps link between display friendly string representation of types // and actual underlying system type. sealed class TypeWrapperConverter : IValueConverter { TypePresenter typePresenter; //ctor - initialized with the type presenter that owns the list of loaded types internal TypeWrapperConverter(TypePresenter typePresenter) { this.typePresenter = typePresenter; } //convert from System.Type to TypeWrapper (display friendly) public object Convert(object value, Type targetType, object parameter, CultureInfo culture) { if (null != value) { //lookup in loaded types if type is already there //if not - add it to collection - may be reused later if (null == this.typePresenter.MostRecentlyUsedTypes.SingleOrDefault<Type>(p => Type.Equals(p, (Type)value))) { this.typePresenter.MostRecentlyUsedTypes.Add((Type)value); } return new TypeWrapper((Type)value); } else { return this.typePresenter.NullTypeWrapper; } } public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture) { //convert back - just get the Type property of the wrapper object TypeWrapper typeWrapper = value as TypeWrapper; if (typeWrapper == this.typePresenter.NullTypeWrapper) { return null; } if (null != typeWrapper && null != typeWrapper.Type && typeof(ArrayOf<>) != typeWrapper.Type && typeof(BrowseForType) != typeWrapper.Type) { return typeWrapper.Type; } return Binding.DoNothing; } } }
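// Usage sketch (illustrative, not part of the original source): configuring a TypePresenter
// from code-behind. Assumes the control is hosted in a WPF designer view and that
// "designerContext" is an EditingContext supplied by that host; only properties and the
// TypeChanged event declared above are used.
//
//     var presenter = new TypePresenter
//     {
//         Context = designerContext,
//         AllowNull = true,                 // adds the "(null)" entry to the drop-down
//         Filter = t => t.IsPublic,         // restrict which types are offered
//         Label = "Variable type"           // feeds the automation name
//     };
//     presenter.TypeChanged += (s, e) => Console.WriteLine(presenter.Type);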
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using Xunit; namespace System.Numerics.Tests { public class operation_ComparisonHashCodeTest { private static void VerifyComplexComparison(Complex c1, Complex c2, bool expectedResult, bool expectedResultEqual) { Assert.True(expectedResult == (c1 == c2), string.Format("c1:{0} == c2{1} is not '{2}' as expected", c1, c2, expectedResult)); Assert.True(expectedResult == (c2 == c1), string.Format("c2:{0} == c1{1} is not '{2}' as expected", c2, c1, expectedResult)); Assert.True(expectedResult != (c1 != c2), string.Format("c1:{0} != c2{1} is not '{2}' as expected", c1, c2, !expectedResult)); Assert.True(expectedResult != (c2 != c1), string.Format("c2:{0} != c1{1} is not '{2}' as expected", c2, c1, !expectedResult)); bool result = c1.Equals(c2); Assert.True(expectedResultEqual == result, string.Format("c1:{0}.Equals(c2{1}) is not '{2}' as expected", c1, c2, expectedResultEqual)); if (result) // then verify Hash Code equality { Assert.True(c1.GetHashCode() == c2.GetHashCode(), string.Format("c1:{0}.GetHashCode() == c2:{1}.GetHashCode() is 'true' as expected", c1, c2)); } result = c2.Equals(c1); Assert.True(expectedResultEqual == result, string.Format("c2:{0}.Equals(c1{1}) is not '{2}' as expected", c2, c1, expectedResultEqual)); if (result) // then verify Hash Code equality { Assert.True(c2.GetHashCode() == c1.GetHashCode(), string.Format("Obj c2:{0}.GetHashCode() == c1:{1}.GetHashCode() is 'true' as expected", c2, c1)); } Assert.True(expectedResult == c2.Equals((Object)c1), string.Format("c2:{0}.Equals((object) c1{1}) is not '{2}' as expected", c2, c1, expectedResult)); Assert.True(expectedResult == c1.Equals((Object)c2), string.Format("c1:{0}.Equals((object) c2{1}) is not '{2}' as expected", c1, c2, expectedResult)); } private static void VerifyComplexComparison(Complex c1, Complex c2, bool expectedResult) { VerifyComplexComparison(c1, c2, expectedResult, expectedResult); } [Fact] public static void RunTests_ZeroOneImaginaryOne() { double real = Support.GetRandomDoubleValue(false); double imaginary = Support.GetRandomDoubleValue(false); Complex randomComplex = new Complex(real, imaginary); real = Support.GetRandomDoubleValue(true); imaginary = Support.GetRandomDoubleValue(true); Complex randomComplexNeg = new Complex(real, imaginary); real = Support.GetSmallRandomDoubleValue(false); imaginary = Support.GetSmallRandomDoubleValue(false); Complex randomSmallComplex = new Complex(real, imaginary); real = Support.GetSmallRandomDoubleValue(true); imaginary = Support.GetSmallRandomDoubleValue(true); Complex randomSmallComplexNeg = new Complex(real, imaginary); VerifyComplexComparison(Complex.Zero, Complex.Zero, true); VerifyComplexComparison(Complex.Zero, Complex.One, false); VerifyComplexComparison(Complex.Zero, -Complex.One, false); VerifyComplexComparison(Complex.Zero, Complex.ImaginaryOne, false); VerifyComplexComparison(Complex.Zero, -Complex.ImaginaryOne, false); VerifyComplexComparison(Complex.Zero, -Complex.ImaginaryOne, false); bool expectedResult = (randomComplex.Real == 0.0 && randomComplex.Imaginary == 0.0); VerifyComplexComparison(Complex.Zero, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == 0.0 && randomComplexNeg.Imaginary == 0.0); VerifyComplexComparison(Complex.Zero, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == 0.0 && randomSmallComplex.Imaginary == 0.0); 
VerifyComplexComparison(Complex.Zero, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == 0.0 && randomSmallComplexNeg.Imaginary == 0.0); VerifyComplexComparison(Complex.Zero, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(Complex.One, Complex.One, true); VerifyComplexComparison(Complex.One, -Complex.One, false); VerifyComplexComparison(Complex.One, Complex.ImaginaryOne, false); VerifyComplexComparison(Complex.One, -Complex.ImaginaryOne, false); expectedResult = (randomComplex.Real == 1.0 && randomComplex.Imaginary == 0.0); VerifyComplexComparison(Complex.One, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == 1.0 && randomComplexNeg.Imaginary == 0.0); VerifyComplexComparison(Complex.One, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == 1.0 && randomSmallComplex.Imaginary == 0.0); VerifyComplexComparison(Complex.One, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == 1.0 && randomSmallComplexNeg.Imaginary == 0.0); VerifyComplexComparison(Complex.One, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(-Complex.One, -Complex.One, true); VerifyComplexComparison(-Complex.One, Complex.ImaginaryOne, false); VerifyComplexComparison(-Complex.One, -Complex.ImaginaryOne, false); expectedResult = (randomComplex.Real == -1.0 && randomComplex.Imaginary == 0.0); VerifyComplexComparison(-Complex.One, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == -1.0 && randomComplexNeg.Imaginary == 0.0); VerifyComplexComparison(-Complex.One, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == -1.0 && randomSmallComplex.Imaginary == 0.0); VerifyComplexComparison(-Complex.One, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == -1.0 && randomSmallComplexNeg.Imaginary == 0.0); VerifyComplexComparison(-Complex.One, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(Complex.ImaginaryOne, Complex.ImaginaryOne, true); VerifyComplexComparison(Complex.ImaginaryOne, -Complex.ImaginaryOne, false); expectedResult = (randomComplex.Real == 0.0 && randomComplex.Imaginary == 1.0); VerifyComplexComparison(Complex.ImaginaryOne, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == 0.0 && randomComplexNeg.Imaginary == 1.0); VerifyComplexComparison(Complex.ImaginaryOne, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == 0.0 && randomSmallComplex.Imaginary == 1.0); VerifyComplexComparison(Complex.ImaginaryOne, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == 0.0 && randomSmallComplexNeg.Imaginary == 1.0); VerifyComplexComparison(Complex.ImaginaryOne, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(-Complex.ImaginaryOne, -Complex.ImaginaryOne, true); expectedResult = (randomComplex.Real == 0.0 && randomComplex.Imaginary == -1.0); VerifyComplexComparison(-Complex.ImaginaryOne, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == 0.0 && randomComplexNeg.Imaginary == -1.0); VerifyComplexComparison(-Complex.ImaginaryOne, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == 0.0 && randomSmallComplex.Imaginary == -1.0); VerifyComplexComparison(-Complex.ImaginaryOne, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == 0.0 && randomSmallComplexNeg.Imaginary == -1.0); VerifyComplexComparison(-Complex.ImaginaryOne, 
randomSmallComplexNeg, expectedResult); } [Fact] public static void RunTests_MaxMinValues() { double real = Support.GetRandomDoubleValue(false); double imaginary = Support.GetRandomDoubleValue(false); Complex randomComplex = new Complex(real, imaginary); real = Support.GetRandomDoubleValue(true); imaginary = Support.GetRandomDoubleValue(true); Complex randomComplexNeg = new Complex(real, imaginary); real = Support.GetSmallRandomDoubleValue(false); imaginary = Support.GetSmallRandomDoubleValue(false); Complex randomSmallComplex = new Complex(real, imaginary); real = Support.GetSmallRandomDoubleValue(true); imaginary = Support.GetSmallRandomDoubleValue(true); Complex randomSmallComplexNeg = new Complex(real, imaginary); Complex maxComplex = new Complex(double.MaxValue, double.MaxValue); Complex minComplex = new Complex(double.MinValue, double.MinValue); Complex maxReal = new Complex(double.MaxValue, 0.0); Complex minReal = new Complex(double.MinValue, 0.0); Complex maxImaginary = new Complex(0.0, double.MaxValue); Complex minImaginary = new Complex(0.0, double.MinValue); VerifyComplexComparison(maxComplex, maxComplex, true); VerifyComplexComparison(maxComplex, minComplex, false); VerifyComplexComparison(maxComplex, maxReal, false); VerifyComplexComparison(maxComplex, minReal, false); VerifyComplexComparison(maxComplex, maxImaginary, false); VerifyComplexComparison(maxComplex, minImaginary, false); bool expectedResult = (randomComplex.Real == maxComplex.Real && randomComplex.Imaginary == maxComplex.Imaginary); VerifyComplexComparison(maxComplex, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == maxComplex.Real && randomComplexNeg.Imaginary == maxComplex.Imaginary); VerifyComplexComparison(maxComplex, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == maxComplex.Real && randomSmallComplex.Imaginary == maxComplex.Imaginary); VerifyComplexComparison(maxComplex, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == maxComplex.Real && randomSmallComplexNeg.Imaginary == maxComplex.Imaginary); VerifyComplexComparison(maxComplex, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(minComplex, minComplex, true); VerifyComplexComparison(minComplex, maxReal, false); VerifyComplexComparison(minComplex, maxImaginary, false); VerifyComplexComparison(minComplex, minImaginary, false); expectedResult = (randomComplex.Real == minComplex.Real && randomComplex.Imaginary == minComplex.Imaginary); VerifyComplexComparison(minComplex, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == minComplex.Real && randomComplexNeg.Imaginary == minComplex.Imaginary); VerifyComplexComparison(minComplex, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == minComplex.Real && randomSmallComplex.Imaginary == minComplex.Imaginary); VerifyComplexComparison(minComplex, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == minComplex.Real && randomSmallComplexNeg.Imaginary == minComplex.Imaginary); VerifyComplexComparison(minComplex, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(maxReal, maxReal, true); VerifyComplexComparison(maxReal, minReal, false); VerifyComplexComparison(maxReal, maxImaginary, false); VerifyComplexComparison(maxReal, minImaginary, false); expectedResult = (randomComplex.Real == maxReal.Real && randomComplex.Imaginary == maxReal.Imaginary); VerifyComplexComparison(maxReal, randomComplex, expectedResult); expectedResult = 
(randomComplexNeg.Real == maxReal.Real && randomComplexNeg.Imaginary == maxReal.Imaginary); VerifyComplexComparison(maxReal, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == maxReal.Real && randomSmallComplex.Imaginary == maxReal.Imaginary); VerifyComplexComparison(maxReal, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == maxReal.Real && randomSmallComplexNeg.Imaginary == maxReal.Imaginary); VerifyComplexComparison(maxReal, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(minReal, minReal, true); VerifyComplexComparison(minReal, maxImaginary, false); VerifyComplexComparison(minReal, minImaginary, false); expectedResult = (randomComplex.Real == minReal.Real && randomComplex.Imaginary == minReal.Imaginary); VerifyComplexComparison(minReal, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == minReal.Real && randomComplexNeg.Imaginary == minReal.Imaginary); VerifyComplexComparison(minReal, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == minReal.Real && randomSmallComplex.Imaginary == minReal.Imaginary); VerifyComplexComparison(minReal, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == minReal.Real && randomSmallComplexNeg.Imaginary == minReal.Imaginary); VerifyComplexComparison(minReal, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(maxImaginary, maxImaginary, true); VerifyComplexComparison(maxImaginary, minImaginary, false); expectedResult = (randomComplex.Real == maxImaginary.Real && randomComplex.Imaginary == maxImaginary.Imaginary); VerifyComplexComparison(maxImaginary, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == maxImaginary.Real && randomComplexNeg.Imaginary == maxImaginary.Imaginary); VerifyComplexComparison(maxImaginary, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == maxImaginary.Real && randomSmallComplex.Imaginary == maxImaginary.Imaginary); VerifyComplexComparison(maxImaginary, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == maxImaginary.Real && randomSmallComplexNeg.Imaginary == maxImaginary.Imaginary); VerifyComplexComparison(maxImaginary, randomSmallComplexNeg, expectedResult); VerifyComplexComparison(minImaginary, minImaginary, true); expectedResult = (randomComplex.Real == minImaginary.Real && randomComplex.Imaginary == minImaginary.Imaginary); VerifyComplexComparison(minImaginary, randomComplex, expectedResult); expectedResult = (randomComplexNeg.Real == minImaginary.Real && randomComplexNeg.Imaginary == minImaginary.Imaginary); VerifyComplexComparison(minImaginary, randomComplexNeg, expectedResult); expectedResult = (randomSmallComplex.Real == minImaginary.Real && randomSmallComplex.Imaginary == minImaginary.Imaginary); VerifyComplexComparison(minImaginary, randomSmallComplex, expectedResult); expectedResult = (randomSmallComplexNeg.Real == minImaginary.Real && randomSmallComplexNeg.Imaginary == minImaginary.Imaginary); VerifyComplexComparison(minImaginary, randomSmallComplexNeg, expectedResult); } [Fact] public static void RunTests_InvalidValues() { double real = Support.GetRandomDoubleValue(false); double imaginary = Support.GetRandomDoubleValue(false); Complex randomComplex = new Complex(real, imaginary); foreach (double imaginaryInvalid in Support.doubleInvalidValues) { real = Support.GetRandomDoubleValue(false); Complex randomInvalidComplex = new Complex(real, imaginaryInvalid); 
VerifyComplexComparison(randomInvalidComplex, randomComplex, false); VerifyComplexComparison(randomInvalidComplex, randomInvalidComplex, !double.IsNaN(imaginaryInvalid), true); } foreach (double realInvalid in Support.doubleInvalidValues) { imaginary = Support.GetRandomDoubleValue(false); Complex randomInvalidComplex = new Complex(realInvalid, imaginary); VerifyComplexComparison(randomInvalidComplex, randomComplex, false); VerifyComplexComparison(randomInvalidComplex, randomInvalidComplex, !double.IsNaN(realInvalid), true); } foreach (double realInvalid in Support.doubleInvalidValues) { foreach (double imaginaryInvalid in Support.doubleInvalidValues) { Complex randomInvalidComplex = new Complex(realInvalid, imaginaryInvalid); VerifyComplexComparison(randomInvalidComplex, randomComplex, false); VerifyComplexComparison(randomInvalidComplex, randomInvalidComplex, !(double.IsNaN(realInvalid) || double.IsNaN(imaginaryInvalid)), true); } } } [Fact] public static void RunTests_WithNonComplexObject() { // local variables double real = Support.GetSmallRandomDoubleValue(false); Complex randomComplex = new Complex(real, 0.0); // verify with same double value Assert.False(randomComplex.Equals((Object)real), string.Format("Obj randomComplex:{0}.Equals((object) real) is not 'false' as expected", randomComplex, real)); // verify with null Assert.False(randomComplex.Equals((Object)null), string.Format("Obj randomComplex:{0}.Equals((object) null) is not 'false' as expected", randomComplex)); // verify with 0 Assert.False(randomComplex.Equals((Object)0), string.Format("Obj randomComplex:{0}.Equals((object) 0) is not 'false' as expected", randomComplex)); // verify with string Assert.False(randomComplex.Equals((Object)"0"), string.Format("Obj randomComplex:{0}.Equals((object) \"0\") is not 'false' as expected", randomComplex)); } } }
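// NOTE: The VerifyComplexComparison helper invoked throughout the tests above is defined elsewhere
// and is not part of this excerpt. The block below is a minimal, hypothetical sketch of what such a
// helper could look like, assuming it cross-checks operator ==/!= against Equals and hash-code
// consistency; the four-argument overload mirrors the NaN cases above, where operator == is expected
// to be false while Equals still reports the value equal to itself. This is illustrative only and is
// not the actual helper used by these tests.
using System.Numerics;
using Xunit;

internal static class ComplexComparisonSketch
{
    // Common case: equality operators and Equals are expected to agree.
    public static void VerifyComplexComparison(Complex left, Complex right, bool expectedResult)
    {
        VerifyComplexComparison(left, right, expectedResult, expectedResult);
    }

    // NaN case: operator == can be false while Equals is true, per the expectations in the tests above.
    public static void VerifyComplexComparison(Complex left, Complex right, bool expectedEquality, bool expectedEquals)
    {
        Assert.Equal(expectedEquality, left == right);
        Assert.Equal(!expectedEquality, left != right);
        Assert.Equal(expectedEquals, left.Equals(right));
        Assert.Equal(expectedEquals, left.Equals((object)right));
        if (expectedEquals)
        {
            // Values that compare equal must hash identically.
            Assert.Equal(left.GetHashCode(), right.GetHashCode());
        }
    }
}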
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.IdentityModel.Selectors; using System.IdentityModel.Tokens; using System.Net.Http; using System.Net.Security; using System.Runtime; using System.Security.Cryptography.X509Certificates; using System.ServiceModel.Description; using System.ServiceModel.Security; using System.ServiceModel.Security.Tokens; using System.Threading.Tasks; namespace System.ServiceModel.Channels { internal class HttpsChannelFactory<TChannel> : HttpChannelFactory<TChannel> { private X509CertificateValidator _sslCertificateValidator; private Func<HttpRequestMessage, X509Certificate2, X509Chain, SslPolicyErrors, bool> _remoteCertificateValidationCallback; internal HttpsChannelFactory(HttpsTransportBindingElement httpsBindingElement, BindingContext context) : base(httpsBindingElement, context) { RequireClientCertificate = httpsBindingElement.RequireClientCertificate; ClientCredentials credentials = context.BindingParameters.Find<ClientCredentials>(); if (credentials != null && credentials.ServiceCertificate.SslCertificateAuthentication != null) { _sslCertificateValidator = credentials.ServiceCertificate.SslCertificateAuthentication.GetCertificateValidator(); _remoteCertificateValidationCallback = RemoteCertificateValidationCallback; } } public override string Scheme { get { return UriEx.UriSchemeHttps; } } public bool RequireClientCertificate { get; } public override bool IsChannelBindingSupportEnabled { get { return false; } } public override T GetProperty<T>() { return base.GetProperty<T>(); } protected override void ValidateCreateChannelParameters(EndpointAddress remoteAddress, Uri via) { if (string.Compare(via.Scheme, "wss", StringComparison.OrdinalIgnoreCase) != 0) { ValidateScheme(via); } if (MessageVersion.Addressing == AddressingVersion.None && remoteAddress.Uri != via) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateToMustEqualViaException(remoteAddress.Uri, via)); } } protected override TChannel OnCreateChannelCore(EndpointAddress address, Uri via) { ValidateCreateChannelParameters(address, via); ValidateWebSocketTransportUsage(); if (typeof(TChannel) == typeof(IRequestChannel)) { return (TChannel)(object)new HttpsClientRequestChannel((HttpsChannelFactory<IRequestChannel>)(object)this, address, via, ManualAddressing); } else { return (TChannel)(object)new ClientWebSocketTransportDuplexSessionChannel((HttpChannelFactory<IDuplexSessionChannel>)(object)this, _clientWebSocketFactory, address, via); } } protected override bool IsSecurityTokenManagerRequired() { return RequireClientCertificate || base.IsSecurityTokenManagerRequired(); } private void OnOpenCore() { if (RequireClientCertificate && SecurityTokenManager == null) { throw Fx.AssertAndThrow("HttpsChannelFactory: SecurityTokenManager is null on open."); } } protected override void OnEndOpen(IAsyncResult result) { base.OnEndOpen(result); OnOpenCore(); } protected override void OnOpen(TimeSpan timeout) { base.OnOpen(timeout); OnOpenCore(); } protected internal override async Task OnOpenAsync(TimeSpan timeout) { await base.OnOpenAsync(timeout); OnOpenCore(); } internal SecurityTokenProvider CreateAndOpenCertificateTokenProvider(EndpointAddress target, Uri via, ChannelParameterCollection channelParameters, TimeSpan timeout) { if (!RequireClientCertificate) { return null; } SecurityTokenProvider certificateProvider = 
TransportSecurityHelpers.GetCertificateTokenProvider( SecurityTokenManager, target, via, Scheme, channelParameters); SecurityUtils.OpenTokenProviderIfRequired(certificateProvider, timeout); return certificateProvider; } internal SecurityTokenContainer GetCertificateSecurityToken(SecurityTokenProvider certificateProvider, EndpointAddress to, Uri via, ChannelParameterCollection channelParameters, ref TimeoutHelper timeoutHelper) { SecurityToken token = null; SecurityTokenContainer tokenContainer = null; SecurityTokenProvider requestCertificateProvider; if (ManualAddressing && RequireClientCertificate) { requestCertificateProvider = CreateAndOpenCertificateTokenProvider(to, via, channelParameters, timeoutHelper.RemainingTime()); } else { requestCertificateProvider = certificateProvider; } if (requestCertificateProvider != null) { token = requestCertificateProvider.GetTokenAsync(timeoutHelper.RemainingTime()).GetAwaiter().GetResult(); } if (ManualAddressing && RequireClientCertificate) { SecurityUtils.AbortTokenProviderIfRequired(requestCertificateProvider); } if (token != null) { tokenContainer = new SecurityTokenContainer(token); } return tokenContainer; } private void AddServerCertMappingOrSetRemoteCertificateValidationCallback(HttpClientHandler httpClientHandler, EndpointAddress to) { Fx.Assert(httpClientHandler != null, "httpClientHandler should not be null."); if (_sslCertificateValidator != null) { httpClientHandler.ServerCertificateCustomValidationCallback = _remoteCertificateValidationCallback; } else { if (to.Identity is X509CertificateEndpointIdentity) { HttpTransportSecurityHelpers.SetServerCertificateValidationCallback(httpClientHandler); } } } private bool RemoteCertificateValidationCallback(HttpRequestMessage sender, X509Certificate2 certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors) { Fx.Assert(_sslCertificateValidator != null, "sslCertificateValidator should not be null."); try { _sslCertificateValidator.Validate(certificate); return true; } catch (SecurityTokenValidationException ex) { FxTrace.Exception.AsInformation(ex); return false; } catch (Exception ex) { if (Fx.IsFatal(ex)) { throw; } FxTrace.Exception.AsWarning(ex); return false; } } internal override HttpClientHandler GetHttpClientHandler(EndpointAddress to, SecurityTokenContainer clientCertificateToken) { HttpClientHandler handler = base.GetHttpClientHandler(to, clientCertificateToken); if (RequireClientCertificate) { SetCertificate(handler, clientCertificateToken); } AddServerCertMappingOrSetRemoteCertificateValidationCallback(handler, to); return handler; } internal override bool IsExpectContinueHeaderRequired => RequireClientCertificate || base.IsExpectContinueHeaderRequired; private static void SetCertificate(HttpClientHandler handler, SecurityTokenContainer clientCertificateToken) { if (clientCertificateToken != null) { X509SecurityToken x509Token = (X509SecurityToken)clientCertificateToken.Token; ValidateClientCertificate(x509Token.Certificate); handler.ClientCertificateOptions = ClientCertificateOption.Manual; handler.ClientCertificates.Add(x509Token.Certificate); } } private static void ValidateClientCertificate(X509Certificate2 certificate) { if (Fx.IsUap) { using (var store = new X509Store(StoreName.My, StoreLocation.CurrentUser)) { store.Open(OpenFlags.ReadOnly); if (store.Certificates.Find(X509FindType.FindByThumbprint, certificate.GetCertHashString(), true).Count == 0) { throw ExceptionHelper.PlatformNotSupported("Certificate could not be found in the MY store."); } } } } protected class 
HttpsClientRequestChannel : HttpClientRequestChannel { private SecurityTokenProvider _certificateProvider; public HttpsClientRequestChannel(HttpsChannelFactory<IRequestChannel> factory, EndpointAddress to, Uri via, bool manualAddressing) : base(factory, to, via, manualAddressing) { Factory = factory; } public new HttpsChannelFactory<IRequestChannel> Factory { get; } private void CreateAndOpenTokenProvider(TimeSpan timeout) { if (!ManualAddressing && Factory.RequireClientCertificate) { _certificateProvider = Factory.CreateAndOpenCertificateTokenProvider(RemoteAddress, Via, ChannelParameters, timeout); } } private void CloseTokenProvider(TimeSpan timeout) { if (_certificateProvider != null) { SecurityUtils.CloseTokenProviderIfRequired(_certificateProvider, timeout); } } private void AbortTokenProvider() { if (_certificateProvider != null) { SecurityUtils.AbortTokenProviderIfRequired(_certificateProvider); } } protected override IAsyncResult OnBeginOpen(TimeSpan timeout, AsyncCallback callback, object state) { TimeoutHelper timeoutHelper = new TimeoutHelper(timeout); CreateAndOpenTokenProvider(timeoutHelper.RemainingTime()); return base.OnBeginOpen(timeoutHelper.RemainingTime(), callback, state); } protected override void OnOpen(TimeSpan timeout) { TimeoutHelper timeoutHelper = new TimeoutHelper(timeout); CreateAndOpenTokenProvider(timeoutHelper.RemainingTime()); base.OnOpen(timeoutHelper.RemainingTime()); } internal protected override Task OnOpenAsync(TimeSpan timeout) { TimeoutHelper timeoutHelper = new TimeoutHelper(timeout); CreateAndOpenTokenProvider(timeoutHelper.RemainingTime()); return base.OnOpenAsync(timeoutHelper.RemainingTime()); } protected override void OnAbort() { AbortTokenProvider(); base.OnAbort(); } protected override IAsyncResult OnBeginClose(TimeSpan timeout, AsyncCallback callback, object state) { TimeoutHelper timeoutHelper = new TimeoutHelper(timeout); CloseTokenProvider(timeoutHelper.RemainingTime()); return base.OnBeginClose(timeoutHelper.RemainingTime(), callback, state); } protected override void OnClose(TimeSpan timeout) { TimeoutHelper timeoutHelper = new TimeoutHelper(timeout); CloseTokenProvider(timeoutHelper.RemainingTime()); base.OnClose(timeoutHelper.RemainingTime()); } internal override void OnHttpRequestCompleted(HttpRequestMessage request) { } internal override async Task<HttpClient> GetHttpClientAsync(EndpointAddress to, Uri via, TimeoutHelper timeoutHelper) { SecurityTokenContainer clientCertificateToken = Factory.GetCertificateSecurityToken(_certificateProvider, to, via, ChannelParameters, ref timeoutHelper); HttpClient httpClient = await base.GetHttpClientAsync(to, via, clientCertificateToken, timeoutHelper); return httpClient; } } } }
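// NOTE: The factory above routes server-certificate validation through
// HttpClientHandler.ServerCertificateCustomValidationCallback when an SslCertificateAuthentication
// validator is configured. The following is a self-contained sketch of that wiring pattern outside
// of the ServiceModel types; the ValidateServerCertificate helper is hypothetical and stands in for
// the X509CertificateValidator the real factory delegates to.
using System.Net.Http;
using System.Net.Security;
using System.Security.Cryptography.X509Certificates;

internal static class CustomServerCertValidationSketch
{
    public static HttpClient CreateClient()
    {
        var handler = new HttpClientHandler
        {
            // Signature: Func<HttpRequestMessage, X509Certificate2, X509Chain, SslPolicyErrors, bool>
            ServerCertificateCustomValidationCallback = (request, certificate, chain, sslPolicyErrors) =>
            {
                try
                {
                    return ValidateServerCertificate(certificate, chain, sslPolicyErrors);
                }
                catch
                {
                    // A sketch-level simplification: any validation failure rejects the certificate.
                    // (The factory above additionally rethrows fatal exceptions and traces the rest.)
                    return false;
                }
            }
        };

        return new HttpClient(handler);
    }

    // Hypothetical validation: accept only certificates that pass the standard chain/policy checks.
    private static bool ValidateServerCertificate(X509Certificate2 certificate, X509Chain chain, SslPolicyErrors errors)
    {
        return errors == SslPolicyErrors.None;
    }
}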
/************************************************************************************ Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved. Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use the Utilities SDK except in compliance with the License, which is provided at the time of installation or download, or which otherwise accompanies this software in either electronic or hard copy form. You may obtain a copy of the License at https://developer.oculus.com/licenses/utilities-1.31 Unless required by applicable law or agreed to in writing, the Utilities SDK distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ************************************************************************************/ #if UNITY_EDITOR using UnityEngine; using UnityEditor; using System.Collections.Generic; using Assets.OVR.Scripts; /// <summary> ///Scans the project and warns about the following conditions: ///Audio sources > 16 ///Using MSAA levels other than recommended level ///Excessive pixel lights (>1 on Gear VR; >3 on Rift) ///Directional Lightmapping Modes (on Gear; use Non-Directional) ///Preload audio setting on individual audio clips ///Decompressing audio clips on load ///Disabling occlusion mesh ///Android target API level set to 21 or higher ///Unity skybox use (on by default, but if you can't see the skybox switching to Color is much faster on Gear) ///Lights marked as "baked" but that were not included in the last bake (and are therefore realtime). ///Lack of static batching and dynamic batching settings activated. ///Full screen image effects (Gear) ///Warn about large textures that are marked as uncompressed. ///32-bit depth buffer (use 16) ///Use of projectors (Gear; can be used carefully but slow enough to warrant a warning) ///Maybe in the future once quantified: Graphics jobs and IL2CPP on Gear. ///Real-time global illumination ///No texture compression, or non-ASTC texture compression as a global setting (Gear). ///Using deferred rendering ///Excessive texture resolution after LOD bias (>2k on Gear VR; >4k on Rift) ///Not using trilinear or aniso filtering and not generating mipmaps ///Excessive render scale (>1.2) ///Slow physics settings: Sleep Threshold < 0.005, Default Contact Offset < 0.01, Solver Iteration Count > 6 ///Shadows on when approaching the geometry or draw call limits ///Non-static objects with colliders that are missing rigidbodies on themselves or in the parent chain. ///No initialization of GPU/CPU throttling settings, or init to dangerous values (-1 or > 3) (Gear) ///Using inefficient effects: SSAO, motion blur, global fog, parallax mapping, etc. ///Too many Overlay layers ///Use of Standard shader or Standard Specular shader on Gear. More generally, excessive use of multipass shaders (legacy specular, etc). ///Multiple cameras with clears (on Gear, potential for excessive fill cost) ///Excessive shader passes (>2) ///Material pointers that have been instanced in the editor (esp. 
if we could determine that the instance has no deltas from the original) ///Excessive draw calls (>150 on Gear VR; >2000 on Rift) ///Excessive tris or verts (>100k on Gear VR; >1M on Rift) ///Large textures, lots of prefabs in startup scene (for bootstrap optimization) ///GPU skinning: testing Android-only, as most Rift devs are GPU-bound. /// </summary> [InitializeOnLoadAttribute] public class OVRLint : EditorWindow { //TODO: The following require reflection or static analysis. ///Use of ONSP reflections (Gear) ///Use of LoadLevelAsync / LoadLevelAdditiveAsync (on Gear, this kills frame rate so dramatically it's probably better to just go to black and load synchronously) ///Use of Linq in non-editor assemblies (common cause of GCs). Minor: use of foreach. ///Use of Unity WWW (exceptionally high overhead for large file downloads, but acceptable for tiny gets). ///Declared but empty Awake/Start/Update/OnCollisionEnter/OnCollisionExit/OnCollisionStay. Also OnCollision* star methods that declare the Collision argument but do not reference it (omitting it short-circuits the collision contact calculation). private static List<FixRecord> mRecords = new List<FixRecord>(); private static List<FixRecord> mRuntimeEditModeRequiredRecords = new List<FixRecord>(); #if !UNITY_2017_2_OR_NEWER private static bool mWasPlaying = false; #endif private Vector2 mScrollPosition; [MenuItem("Oculus/Tools/OVR Performance Lint Tool")] static void Init() { // Get existing open window or if none, make a new one: EditorWindow.GetWindow(typeof(OVRLint)); OVRPlugin.SendEvent("perf_lint", "activated"); OVRLint.RunCheck(); #if !UNITY_2017_2_OR_NEWER mWasPlaying = EditorApplication.isPlaying; #endif } OVRLint() { #if UNITY_2017_2_OR_NEWER EditorApplication.playModeStateChanged += HandlePlayModeState; #else EditorApplication.playmodeStateChanged += () => { // When Unity starts playing, it would also trigger play mode changed event with isPlaying == false // Fixes should only be applied when it was transitioned from playing mode if (!EditorApplication.isPlaying && mWasPlaying) { ApplyEditModeRequiredFix(); mWasPlaying = false; } else { mWasPlaying = true; } }; #endif } #if UNITY_2017_2_OR_NEWER private static void HandlePlayModeState(PlayModeStateChange state) { if (state == PlayModeStateChange.EnteredEditMode) { ApplyEditModeRequiredFix(); } } #endif private static void ApplyEditModeRequiredFix() { // Apply runtime fixes that require edit mode when applying fix foreach (FixRecord record in mRuntimeEditModeRequiredRecords) { record.fixMethod(null, false, 0); OVRPlugin.SendEvent("perf_lint_apply_fix", record.category); record.complete = true; } mRuntimeEditModeRequiredRecords.Clear(); } void OnGUI() { GUILayout.Label("OVR Performance Lint Tool", EditorStyles.boldLabel); if (GUILayout.Button("Refresh", EditorStyles.toolbarButton, GUILayout.ExpandWidth(false))) { RunCheck(); } string lastCategory = ""; mScrollPosition = EditorGUILayout.BeginScrollView(mScrollPosition); for (int x = 0; x < mRecords.Count; x++) { FixRecord record = mRecords[x]; if (!record.category.Equals(lastCategory)) // new category { lastCategory = record.category; EditorGUILayout.Separator(); EditorGUILayout.BeginHorizontal(); GUILayout.Label(lastCategory, EditorStyles.label, GUILayout.Width(200)); bool moreThanOne = (x + 1 < mRecords.Count && mRecords[x + 1].category.Equals(lastCategory)); if (record.buttonNames != null && record.buttonNames.Length > 0) { if (moreThanOne) { GUILayout.Label("Apply to all:", EditorStyles.label, GUILayout.Width(75)); for (int 
y = 0; y < record.buttonNames.Length; y++) { if (GUILayout.Button(record.buttonNames[y], EditorStyles.toolbarButton, GUILayout.Width(200))) { List<FixRecord> recordsToProcess = new List<FixRecord>(); for (int z = x; z < mRecords.Count; z++) { FixRecord thisRecord = mRecords[z]; bool isLast = false; if (z + 1 >= mRecords.Count || !mRecords[z + 1].category.Equals(lastCategory)) { isLast = true; } if (!thisRecord.complete) { recordsToProcess.Add(thisRecord); } if (isLast) { break; } } UnityEngine.Object[] undoObjects = new UnityEngine.Object[recordsToProcess.Count]; for (int z = 0; z < recordsToProcess.Count; z++) { undoObjects[z] = recordsToProcess[z].targetObject; } Undo.RecordObjects(undoObjects, record.category + " (Multiple)"); for (int z = 0; z < recordsToProcess.Count; z++) { FixRecord thisRecord = recordsToProcess[z]; thisRecord.fixMethod(thisRecord.targetObject, (z + 1 == recordsToProcess.Count), y); OVRPlugin.SendEvent("perf_lint_apply_fix", thisRecord.category); thisRecord.complete = true; } } } } } EditorGUILayout.EndHorizontal(); if (moreThanOne || record.targetObject) { GUILayout.Label(record.message); } } EditorGUILayout.BeginHorizontal(); GUI.enabled = !record.complete; if (record.targetObject) { EditorGUILayout.ObjectField(record.targetObject, record.targetObject.GetType(), true); } else { GUILayout.Label(record.message); } if (record.buttonNames != null) { for (int y = 0; y < record.buttonNames.Length; y++) { if (GUILayout.Button(record.buttonNames[y], EditorStyles.toolbarButton, GUILayout.Width(200))) { if (record.targetObject != null) { Undo.RecordObject(record.targetObject, record.category); } if (record.editModeRequired) { // Add to the fix record list that requires edit mode mRuntimeEditModeRequiredRecords.Add(record); } else { // Apply the fix directly record.fixMethod(record.targetObject, true, y); OVRPlugin.SendEvent("perf_lint_apply_fix", record.category); record.complete = true; } if (mRuntimeEditModeRequiredRecords.Count != 0) { // Stop the scene to apply edit mode required records EditorApplication.ExecuteMenuItem("Edit/Play"); } } } } GUI.enabled = true; EditorGUILayout.EndHorizontal(); } EditorGUILayout.EndScrollView(); } public static int RunCheck() { mRecords.Clear(); mRuntimeEditModeRequiredRecords.Clear(); CheckStaticCommonIssues(); #if UNITY_ANDROID CheckStaticAndroidIssues(); #endif if (EditorApplication.isPlaying) { CheckRuntimeCommonIssues(); #if UNITY_ANDROID CheckRuntimeAndroidIssues(); #endif } mRecords.Sort(delegate (FixRecord record1, FixRecord record2) { return record1.category.CompareTo(record2.category); }); return mRecords.Count; } static void AddFix(string category, string message, FixMethodDelegate method, UnityEngine.Object target, bool editModeRequired, params string[] buttons) { OVRPlugin.SendEvent("perf_lint_add_fix", category); mRecords.Add(new FixRecord(category, message, method, target, editModeRequired, buttons)); } static void CheckStaticCommonIssues() { if (OVRManager.IsUnityAlphaOrBetaVersion()) { AddFix("General", OVRManager.UnityAlphaOrBetaVersionWarningMessage, null, null, false); } if (QualitySettings.anisotropicFiltering != AnisotropicFiltering.Enable && QualitySettings.anisotropicFiltering != AnisotropicFiltering.ForceEnable) { AddFix("Optimize Aniso", "Anisotropic filtering is recommended for optimal image sharpness and GPU performance.", delegate (UnityEngine.Object obj, bool last, int selected) { // Ideally this would be multi-option: offer Enable or ForceEnable. 
QualitySettings.anisotropicFiltering = AnisotropicFiltering.Enable; }, null, false, "Fix"); } #if UNITY_ANDROID int recommendedPixelLightCount = 1; #else int recommendedPixelLightCount = 3; #endif if (QualitySettings.pixelLightCount > recommendedPixelLightCount) { AddFix("Optimize Pixel Light Count", "For GPU performance set no more than " + recommendedPixelLightCount + " pixel lights in Quality Settings (currently " + QualitySettings.pixelLightCount + ").", delegate (UnityEngine.Object obj, bool last, int selected) { QualitySettings.pixelLightCount = recommendedPixelLightCount; }, null, false, "Fix"); } #if false // Should we recommend this? Seems to be mutually exclusive w/ dynamic batching. if (!PlayerSettings.graphicsJobs) { AddFix ("Optimize Graphics Jobs", "For CPU performance, please use graphics jobs.", delegate(UnityEngine.Object obj, bool last, int selected) { PlayerSettings.graphicsJobs = true; }, null, false, "Fix"); } #endif #if UNITY_2017_2_OR_NEWER if ((!PlayerSettings.MTRendering || !PlayerSettings.GetMobileMTRendering(BuildTargetGroup.Android))) #else if ((!PlayerSettings.MTRendering || !PlayerSettings.mobileMTRendering)) #endif { AddFix("Optimize MT Rendering", "For CPU performance, please enable multithreaded rendering.", delegate (UnityEngine.Object obj, bool last, int selected) { #if UNITY_2017_2_OR_NEWER PlayerSettings.SetMobileMTRendering(BuildTargetGroup.Standalone, true); PlayerSettings.SetMobileMTRendering(BuildTargetGroup.Android, true); #else PlayerSettings.MTRendering = PlayerSettings.mobileMTRendering = true; #endif }, null, false, "Fix"); } #if UNITY_ANDROID if (!PlayerSettings.use32BitDisplayBuffer) { AddFix("Optimize Display Buffer Format", "We recommend to enable use32BitDisplayBuffer.", delegate (UnityEngine.Object obj, bool last, int selected) { PlayerSettings.use32BitDisplayBuffer = true; }, null, false, "Fix"); } #endif #if UNITY_2017_3_OR_NEWER && !UNITY_ANDROID if (!PlayerSettings.VROculus.dashSupport) { AddFix("Enable Dash Integration", "We recommend to enable Dash Integration for better user experience.", delegate (UnityEngine.Object obj, bool last, int selected) { PlayerSettings.VROculus.dashSupport = true; }, null, false, "Fix"); } if (!PlayerSettings.VROculus.sharedDepthBuffer) { AddFix("Enable Depth Buffer Sharing", "We recommend to enable Depth Buffer Sharing for better user experience on Oculus Dash.", delegate (UnityEngine.Object obj, bool last, int selected) { PlayerSettings.VROculus.sharedDepthBuffer = true; }, null, false, "Fix"); } #endif BuildTargetGroup target = EditorUserBuildSettings.selectedBuildTargetGroup; var tier = UnityEngine.Rendering.GraphicsTier.Tier1; var tierSettings = UnityEditor.Rendering.EditorGraphicsSettings.GetTierSettings(target, tier); if ((tierSettings.renderingPath == RenderingPath.DeferredShading || tierSettings.renderingPath == RenderingPath.DeferredLighting)) { AddFix("Optimize Rendering Path", "For CPU performance, please do not use deferred shading.", delegate (UnityEngine.Object obj, bool last, int selected) { tierSettings.renderingPath = RenderingPath.Forward; UnityEditor.Rendering.EditorGraphicsSettings.SetTierSettings(target, tier, tierSettings); }, null, false, "Use Forward"); } if (PlayerSettings.stereoRenderingPath == StereoRenderingPath.MultiPass) { AddFix("Optimize Stereo Rendering", "For CPU performance, please enable single-pass or instanced stereo rendering.", delegate (UnityEngine.Object obj, bool last, int selected) { PlayerSettings.stereoRenderingPath = StereoRenderingPath.Instancing; }, null, 
false, "Fix"); } if (LightmapSettings.lightmaps.Length > 0 && LightmapSettings.lightmapsMode != LightmapsMode.NonDirectional) { AddFix("Optimize Lightmap Directionality", "Switching from directional lightmaps to non-directional lightmaps can save a small amount of GPU time.", delegate (UnityEngine.Object obj, bool last, int selected) { LightmapSettings.lightmapsMode = LightmapsMode.NonDirectional; }, null, false, "Switch to non-directional lightmaps"); } if (Lightmapping.realtimeGI) { AddFix("Disable Realtime GI", "Disabling real-time global illumination can improve GPU performance.", delegate (UnityEngine.Object obj, bool last, int selected) { Lightmapping.realtimeGI = false; }, null, false, "Set Lightmapping.realtimeGI = false."); } var lights = GameObject.FindObjectsOfType<Light>(); for (int i = 0; i < lights.Length; ++i) { #if UNITY_2017_3_OR_NEWER if (lights [i].type != LightType.Directional && !lights [i].bakingOutput.isBaked && IsLightBaked(lights[i])) #else if (lights[i].type != LightType.Directional && !lights[i].isBaked && IsLightBaked(lights[i])) #endif { AddFix("Unbaked Lights", "The following lights in the scene are marked as Baked, but they don't have up to date lightmap data. Generate the lightmap data, or set it to auto-generate, in Window->Lighting->Settings.", null, lights[i], false, null); } if (lights[i].shadows != LightShadows.None && !IsLightBaked(lights[i])) { AddFix("Optimize Shadows", "For CPU performance, consider disabling shadows on realtime lights.", delegate (UnityEngine.Object obj, bool last, int selected) { Light thisLight = (Light)obj; thisLight.shadows = LightShadows.None; }, lights[i], false, "Set \"Shadow Type\" to \"No Shadows\""); } } var sources = GameObject.FindObjectsOfType<AudioSource>(); if (sources.Length > 16) { List<AudioSource> playingAudioSources = new List<AudioSource>(); foreach (var audioSource in sources) { if (audioSource.isPlaying) { playingAudioSources.Add(audioSource); } } if (playingAudioSources.Count > 16) { // Sort playing audio sources by priority playingAudioSources.Sort(delegate (AudioSource x, AudioSource y) { return x.priority.CompareTo(y.priority); }); for (int i = 16; i < playingAudioSources.Count; ++i) { AddFix("Optimize Audio Source Count", "For CPU performance, please disable all but the top 16 AudioSources.", delegate (UnityEngine.Object obj, bool last, int selected) { AudioSource audioSource = (AudioSource)obj; audioSource.enabled = false; }, playingAudioSources[i], false, "Disable"); } } } var clips = GameObject.FindObjectsOfType<AudioClip>(); for (int i = 0; i < clips.Length; ++i) { if (clips[i].loadType == AudioClipLoadType.DecompressOnLoad) { AddFix("Audio Loading", "For fast loading, please don't use decompress on load for audio clips", delegate (UnityEngine.Object obj, bool last, int selected) { AudioClip thisClip = (AudioClip)obj; if (selected == 0) { SetAudioLoadType(thisClip, AudioClipLoadType.CompressedInMemory, last); } else { SetAudioLoadType(thisClip, AudioClipLoadType.Streaming, last); } }, clips[i], false, "Change to Compressed in Memory", "Change to Streaming"); } if (clips[i].preloadAudioData) { AddFix("Audio Preload", "For fast loading, please don't preload data for audio clips.", delegate (UnityEngine.Object obj, bool last, int selected) { SetAudioPreload(clips[i], false, last); }, clips[i], false, "Fix"); } } if (Physics.defaultContactOffset < 0.01f) { AddFix("Optimize Contact Offset", "For CPU performance, please don't use default contact offset below 0.01.", delegate (UnityEngine.Object obj, bool 
last, int selected) { Physics.defaultContactOffset = 0.01f; }, null, false, "Fix"); } if (Physics.sleepThreshold < 0.005f) { AddFix("Optimize Sleep Threshold", "For CPU performance, please don't use sleep threshold below 0.005.", delegate (UnityEngine.Object obj, bool last, int selected) { Physics.sleepThreshold = 0.005f; }, null, false, "Fix"); } if (Physics.defaultSolverIterations > 8) { AddFix("Optimize Solver Iterations", "For CPU performance, please don't use excessive solver iteration counts.", delegate (UnityEngine.Object obj, bool last, int selected) { Physics.defaultSolverIterations = 8; }, null, false, "Fix"); } var materials = Resources.FindObjectsOfTypeAll<Material>(); for (int i = 0; i < materials.Length; ++i) { if (materials[i].shader.name.Contains("Parallax") || materials[i].IsKeywordEnabled("_PARALLAXMAP")) { AddFix("Optimize Shading", "For GPU performance, please don't use parallax-mapped materials.", delegate (UnityEngine.Object obj, bool last, int selected) { Material thisMaterial = (Material)obj; if (thisMaterial.IsKeywordEnabled("_PARALLAXMAP")) { thisMaterial.DisableKeyword("_PARALLAXMAP"); } if (thisMaterial.shader.name.Contains("Parallax")) { var newName = thisMaterial.shader.name.Replace("-ParallaxSpec", "-BumpSpec"); newName = newName.Replace("-Parallax", "-Bump"); var newShader = Shader.Find(newName); if (newShader) { thisMaterial.shader = newShader; } else { Debug.LogWarning("Unable to find a replacement for shader " + materials[i].shader.name); } } }, materials[i], false, "Fix"); } } var renderers = GameObject.FindObjectsOfType<Renderer>(); for (int i = 0; i < renderers.Length; ++i) { if (renderers[i].sharedMaterial == null) { AddFix("Instanced Materials", "Please avoid instanced materials on renderers.", null, renderers[i], false); } } var overlays = GameObject.FindObjectsOfType<OVROverlay>(); if (overlays.Length > 4) { AddFix("Optimize VR Layer Count", "For GPU performance, please use 4 or fewer VR layers.", delegate (UnityEngine.Object obj, bool last, int selected) { for (int i = 4; i < OVROverlay.instances.Length; ++i) { OVROverlay.instances[i].enabled = false; } }, null, false, "Fix"); } var splashScreen = PlayerSettings.virtualRealitySplashScreen; if (splashScreen != null) { if (splashScreen.filterMode != FilterMode.Trilinear) { AddFix("Optimize VR Splash Filtering", "For visual quality, please use trilinear filtering on your VR splash screen.", delegate (UnityEngine.Object obj, bool last, int EditorSelectedRenderState) { var assetPath = AssetDatabase.GetAssetPath(splashScreen); var importer = (TextureImporter)TextureImporter.GetAtPath(assetPath); importer.filterMode = FilterMode.Trilinear; AssetDatabase.ImportAsset(assetPath, ImportAssetOptions.ForceUpdate); }, null, false, "Fix"); } if (splashScreen.mipmapCount <= 1) { AddFix("Generate VR Splash Mipmaps", "For visual quality, please use mipmaps with your VR splash screen.", delegate (UnityEngine.Object obj, bool last, int EditorSelectedRenderState) { var assetPath = AssetDatabase.GetAssetPath(splashScreen); var importer = (TextureImporter)TextureImporter.GetAtPath(assetPath); importer.mipmapEnabled = true; AssetDatabase.ImportAsset(assetPath, ImportAssetOptions.ForceUpdate); }, null, false, "Fix"); } } } static void CheckRuntimeCommonIssues() { if (!OVRPlugin.occlusionMesh) { AddFix("Occlusion Mesh", "Enabling the occlusion mesh saves substantial GPU resources, generally with no visual impact. 
Enable unless you have an exceptional use case.", delegate (UnityEngine.Object obj, bool last, int selected) { OVRPlugin.occlusionMesh = true; }, null, false, "Set OVRPlugin.occlusionMesh = true"); } if (OVRManager.instance != null && !OVRManager.instance.useRecommendedMSAALevel) { AddFix("Optimize MSAA", "OVRManager can select the optimal antialiasing for the installed hardware at runtime. Recommend enabling this.", delegate (UnityEngine.Object obj, bool last, int selected) { var ovrManagers = GameObject.FindObjectsOfType<OVRManager>(); foreach (var ovrManager in ovrManagers) { ovrManager.useRecommendedMSAALevel = true; } }, null, true, "Stop Play and Fix"); } #if UNITY_2017_2_OR_NEWER if (UnityEngine.XR.XRSettings.eyeTextureResolutionScale > 1.5) #else if (UnityEngine.VR.VRSettings.renderScale > 1.5) #endif { AddFix("Optimize Render Scale", "Render scale above 1.5 is extremely expensive on the GPU, with little if any positive visual benefit.", delegate (UnityEngine.Object obj, bool last, int selected) { #if UNITY_2017_2_OR_NEWER UnityEngine.XR.XRSettings.eyeTextureResolutionScale = 1.5f; #else UnityEngine.VR.VRSettings.renderScale = 1.5f; #endif }, null, false, "Fix"); } } static void CheckStaticAndroidIssues() { // Check that the minSDKVersion meets requirement, 21 for Gear and Go, 23 for Quest AndroidSdkVersions recommendedAndroidMinSdkVersion = AndroidSdkVersions.AndroidApiLevel21; if (OVRDeviceSelector.isTargetDeviceQuest) { recommendedAndroidMinSdkVersion = AndroidSdkVersions.AndroidApiLevel23; } if ((int)PlayerSettings.Android.minSdkVersion < (int)recommendedAndroidMinSdkVersion) { AddFix("Set Min Android API Level", "Please require at least API level " + (int)recommendedAndroidMinSdkVersion, delegate (UnityEngine.Object obj, bool last, int selected) { PlayerSettings.Android.minSdkVersion = recommendedAndroidMinSdkVersion; }, null, false, "Fix"); } // Check that compileSDKVersion meets minimal version 26 as required for Quest's headtracking feature // Unity Sets compileSDKVersion in Gradle as the value used in targetSdkVersion AndroidSdkVersions requiredAndroidTargetSdkVersion = AndroidSdkVersions.AndroidApiLevel26; if (OVRDeviceSelector.isTargetDeviceQuest && (int)PlayerSettings.Android.targetSdkVersion < (int)requiredAndroidTargetSdkVersion) { AddFix("Set Android Target SDK Level", "Oculus Quest apps require at least target API level " + (int)requiredAndroidTargetSdkVersion, delegate (UnityEngine.Object obj, bool last, int selected) { PlayerSettings.Android.targetSdkVersion = requiredAndroidTargetSdkVersion; }, null, false, "Fix"); } if (!PlayerSettings.gpuSkinning) { AddFix("Optimize GPU Skinning", "If you are CPU-bound, consider using GPU skinning.", delegate (UnityEngine.Object obj, bool last, int selected) { PlayerSettings.gpuSkinning = true; }, null, false, "Fix"); } if (RenderSettings.skybox) { AddFix("Optimize Clearing", "For GPU performance, please don't use Unity's built-in Skybox.", delegate (UnityEngine.Object obj, bool last, int selected) { RenderSettings.skybox = null; }, null, false, "Clear Skybox"); } var materials = Resources.FindObjectsOfTypeAll<Material>(); for (int i = 0; i < materials.Length; ++i) { if (materials[i].IsKeywordEnabled("_SPECGLOSSMAP") || materials[i].IsKeywordEnabled("_METALLICGLOSSMAP")) { AddFix("Optimize Specular Material", "For GPU performance, please don't use specular shader on materials.", delegate (UnityEngine.Object obj, bool last, int selected) { Material thisMaterial = (Material)obj; thisMaterial.DisableKeyword("_SPECGLOSSMAP"); 
thisMaterial.DisableKeyword("_METALLICGLOSSMAP"); }, materials[i], false, "Fix"); } if (materials[i].passCount > 1) { AddFix("Material Passes", "Please use 2 or fewer passes in materials.", null, materials[i], false); } } ScriptingImplementation backend = PlayerSettings.GetScriptingBackend(UnityEditor.BuildTargetGroup.Android); if (backend != UnityEditor.ScriptingImplementation.IL2CPP) { AddFix("Optimize Scripting Backend", "For CPU performance, please use IL2CPP.", delegate (UnityEngine.Object obj, bool last, int selected) { PlayerSettings.SetScriptingBackend(UnityEditor.BuildTargetGroup.Android, UnityEditor.ScriptingImplementation.IL2CPP); }, null, false, "Fix"); } var monoBehaviours = GameObject.FindObjectsOfType<MonoBehaviour>(); System.Type effectBaseType = System.Type.GetType("UnityStandardAssets.ImageEffects.PostEffectsBase"); if (effectBaseType != null) { for (int i = 0; i < monoBehaviours.Length; ++i) { if (monoBehaviours[i].GetType().IsSubclassOf(effectBaseType)) { AddFix("Image Effects", "Please don't use image effects.", null, monoBehaviours[i], false); } } } var textures = Resources.FindObjectsOfTypeAll<Texture2D>(); int maxTextureSize = 1024 * (1 << QualitySettings.masterTextureLimit); maxTextureSize = maxTextureSize * maxTextureSize; for (int i = 0; i < textures.Length; ++i) { if (textures[i].filterMode == FilterMode.Trilinear && textures[i].mipmapCount == 1) { AddFix("Optimize Texture Filtering", "For GPU performance, please generate mipmaps or disable trilinear filtering for textures.", delegate (UnityEngine.Object obj, bool last, int selected) { Texture2D thisTexture = (Texture2D)obj; if (selected == 0) { thisTexture.filterMode = FilterMode.Bilinear; } else { SetTextureUseMips(thisTexture, true, last); } }, textures[i], false, "Switch to Bilinear", "Generate Mipmaps"); } } var projectors = GameObject.FindObjectsOfType<Projector>(); if (projectors.Length > 0) { AddFix("Optimize Projectors", "For GPU performance, please don't use projectors.", delegate (UnityEngine.Object obj, bool last, int selected) { Projector[] thisProjectors = GameObject.FindObjectsOfType<Projector>(); for (int i = 0; i < thisProjectors.Length; ++i) { thisProjectors[i].enabled = false; } }, null, false, "Disable Projectors"); } if (EditorUserBuildSettings.androidBuildSubtarget != MobileTextureSubtarget.ASTC) { AddFix("Optimize Texture Compression", "For GPU performance, please use ASTC.", delegate (UnityEngine.Object obj, bool last, int selected) { EditorUserBuildSettings.androidBuildSubtarget = MobileTextureSubtarget.ASTC; }, null, false, "Fix"); } var cameras = GameObject.FindObjectsOfType<Camera>(); int clearCount = 0; for (int i = 0; i < cameras.Length; ++i) { if (cameras[i].clearFlags != CameraClearFlags.Nothing && cameras[i].clearFlags != CameraClearFlags.Depth) ++clearCount; } if (clearCount > 2) { AddFix("Camera Clears", "Please use 2 or fewer clears.", null, null, false); } for (int i = 0; i < cameras.Length; ++i) { if (cameras[i].forceIntoRenderTexture) { AddFix("Optimize Mobile Rendering", "For GPU performance, please don't enable forceIntoRenderTexture on your camera, this might be a flag pollution created by post process stack you used before, \nif your post process had already been turned off, we strongly encourage you to disable forceIntoRenderTexture. If you still want to use post process for some reasons, \nyou can leave this one on, but be warned, enabling this flag will introduce huge GPU performance cost. 
To view your flag status, please turn on you inspector's debug mode", delegate (UnityEngine.Object obj, bool last, int selected) { Camera thisCamera = (Camera)obj; thisCamera.forceIntoRenderTexture = false; }, cameras[i], false, "Disable forceIntoRenderTexture"); } } } static void CheckRuntimeAndroidIssues() { if (UnityStats.usedTextureMemorySize + UnityStats.vboTotalBytes > 1000000) { AddFix("Graphics Memory", "Please use less than 1GB of vertex and texture memory.", null, null, false); } if (OVRManager.cpuLevel < 0 || OVRManager.cpuLevel > 3) { AddFix("Optimize CPU level", "For battery life, please use a safe CPU level.", delegate (UnityEngine.Object obj, bool last, int selected) { OVRManager.cpuLevel = 2; }, null, false, "Set to CPU2"); } if (OVRManager.gpuLevel < 0 || OVRManager.gpuLevel > 3) { AddFix("Optimize GPU level", "For battery life, please use a safe GPU level.", delegate (UnityEngine.Object obj, bool last, int selected) { OVRManager.gpuLevel = 2; }, null, false, "Set to GPU2"); } if (UnityStats.triangles > 100000 || UnityStats.vertices > 100000) { AddFix("Triangles and Verts", "Please use less than 100000 triangles or vertices.", null, null, false); } // Warn for 50 if in non-VR mode? if (UnityStats.drawCalls > 100) { AddFix("Draw Calls", "Please use less than 100 draw calls.", null, null, false); } } enum LightmapType { Realtime = 4, Baked = 2, Mixed = 1 }; static bool IsLightBaked(Light light) { return light.lightmapBakeType == LightmapBakeType.Baked; } static void SetAudioPreload(AudioClip clip, bool preload, bool refreshImmediately) { if (clip != null) { string assetPath = AssetDatabase.GetAssetPath(clip); AudioImporter importer = AssetImporter.GetAtPath(assetPath) as AudioImporter; if (importer != null) { if (preload != importer.preloadAudioData) { importer.preloadAudioData = preload; AssetDatabase.ImportAsset(assetPath); if (refreshImmediately) { AssetDatabase.Refresh(); } } } } } static void SetAudioLoadType(AudioClip clip, AudioClipLoadType loadType, bool refreshImmediately) { if (clip != null) { string assetPath = AssetDatabase.GetAssetPath(clip); AudioImporter importer = AssetImporter.GetAtPath(assetPath) as AudioImporter; if (importer != null) { if (loadType != importer.defaultSampleSettings.loadType) { AudioImporterSampleSettings settings = importer.defaultSampleSettings; settings.loadType = loadType; importer.defaultSampleSettings = settings; AssetDatabase.ImportAsset(assetPath); if (refreshImmediately) { AssetDatabase.Refresh(); } } } } } public static void SetTextureUseMips(Texture texture, bool useMips, bool refreshImmediately) { if (texture != null) { string assetPath = AssetDatabase.GetAssetPath(texture); TextureImporter tImporter = AssetImporter.GetAtPath(assetPath) as TextureImporter; if (tImporter != null && tImporter.mipmapEnabled != useMips) { tImporter.mipmapEnabled = useMips; AssetDatabase.ImportAsset(assetPath); if (refreshImmediately) { AssetDatabase.Refresh(); } } } } static T FindComponentInParents<T>(GameObject obj) where T : Component { T component = null; if (obj != null) { Transform parent = obj.transform.parent; if (parent != null) { do { component = parent.GetComponent(typeof(T)) as T; parent = parent.parent; } while (parent != null && component == null); } } return component; } } #endif
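#if UNITY_EDITOR
// NOTE: The FixRecord and FixMethodDelegate types used by OVRLint above come from
// Assets.OVR.Scripts and are not shown in this excerpt. The sketch below is a hypothetical,
// stripped-down version of that record-plus-delegate pattern, only to illustrate how a lint check
// registers a deferred fix against a target object; it is not the actual Oculus implementation.
using System.Collections.Generic;
using UnityEngine;

internal class LintFixSketch
{
    // A fix is a description plus a delegate that applies the change to a target object.
    internal delegate void FixMethod(Object target, bool isLastInBatch, int selectedOption);

    internal class Record
    {
        public string Category;
        public string Message;
        public FixMethod Fix;
        public Object Target;
        public bool Complete;
    }

    private readonly List<Record> _records = new List<Record>();

    // Example check: register a fix that disables shadows on a realtime light,
    // analogous to the "Optimize Shadows" check above.
    public void CheckLight(Light light)
    {
        if (light != null && light.shadows != LightShadows.None)
        {
            _records.Add(new Record
            {
                Category = "Optimize Shadows",
                Message = "Consider disabling shadows on realtime lights.",
                Target = light,
                Fix = (target, last, selected) => ((Light)target).shadows = LightShadows.None
            });
        }
    }

    // Apply every pending fix, mirroring the "Apply to all" button behaviour in the tool above.
    public void ApplyAll()
    {
        foreach (Record record in _records)
        {
            if (!record.Complete && record.Fix != null)
            {
                record.Fix(record.Target, true, 0);
                record.Complete = true;
            }
        }
    }
}
#endif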
// // Copyright (c) 2004-2017 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // namespace NLog.UnitTests.Targets { using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Net; using System.Net.Sockets; using System.Text; using System.Threading; using NLog.Common; using NLog.Config; using NLog.Internal.NetworkSenders; using NLog.Targets; using Xunit; public class NetworkTargetTests : NLogTestBase { [Fact] public void HappyPathDefaultsTest() { this.HappyPathTest(false, LineEndingMode.CRLF, "msg1", "msg2", "msg3"); } [Fact] public void HappyPathCRLFTest() { this.HappyPathTest(true, LineEndingMode.CRLF, "msg1", "msg2", "msg3"); } [Fact] public void HappyPathLFTest() { this.HappyPathTest(true, LineEndingMode.LF, "msg1", "msg2", "msg3"); } private void HappyPathTest(bool newLine, LineEndingMode lineEnding, params string[] messages) { var senderFactory = new MySenderFactory(); var target = new NetworkTarget(); target.Address = "tcp://someaddress/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.NewLine = newLine; target.LineEnding = lineEnding; target.KeepConnection = true; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new ManualResetEvent(false); int remaining = 3; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger", "msg1").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger", "msg2").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger", "msg3").WithContinuation(asyncContinuation)); mre.WaitOne(); foreach (var ex in exceptions) { if (ex != null) { Assert.True(false, ex.ToString()); } } Assert.Single(senderFactory.Senders); var sender = senderFactory.Senders[0]; target.Close(); // Get the length of all the messages and their line 
endings var eol = newLine ? lineEnding.NewLineCharacters : string.Empty; var eolLength = eol.Length; var length = messages.Sum(m => m.Length) + (eolLength * messages.Length); Assert.Equal(length, sender.MemoryStream.Length); Assert.Equal(string.Join(eol, messages) + eol, target.Encoding.GetString(sender.MemoryStream.GetBuffer(), 0, (int)sender.MemoryStream.Length)); // we invoke the sender for each message, each time sending 4 bytes var actual = senderFactory.Log.ToString(); Assert.True(actual.IndexOf("1: connect tcp://someaddress/") != -1); foreach (var message in messages) { Assert.True(actual.IndexOf($"1: send 0 {message.Length + eolLength}") != -1); } Assert.True(actual.IndexOf("1: close") != -1); } [Fact] public void NetworkTargetDefaultsTest() { var target = new NetworkTarget(); Assert.True(target.KeepConnection); Assert.False(target.NewLine); Assert.Equal("\r\n", target.LineEnding.NewLineCharacters); Assert.Equal(65000, target.MaxMessageSize); Assert.Equal(5, target.ConnectionCacheSize); Assert.Equal(0, target.MaxConnections); Assert.Equal(0, target.MaxQueueSize); Assert.Equal(Encoding.UTF8, target.Encoding); } [Fact] public void NetworkTargetMultipleConnectionsTest() { var senderFactory = new MySenderFactory(); var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = true; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new ManualResetEvent(false); int remaining = 3; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "msg1").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger2", "msg2").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger3", "msg3").WithContinuation(asyncContinuation)); mre.WaitOne(); foreach (var ex in exceptions) { if (ex != null) { Assert.True(false, ex.ToString()); } } mre.Reset(); AsyncContinuation flushContinuation = ex => { mre.Set(); }; target.Flush(flushContinuation); mre.WaitOne(); target.Close(); var actual = senderFactory.Log.ToString(); Assert.True(actual.IndexOf("1: connect tcp://logger1.company.lan/") != -1); Assert.True(actual.IndexOf("1: send 0 4") != -1); Assert.True(actual.IndexOf("2: connect tcp://logger2.company.lan/") != -1); Assert.True(actual.IndexOf("2: send 0 4") != -1); Assert.True(actual.IndexOf("3: connect tcp://logger3.company.lan/") != -1); Assert.True(actual.IndexOf("3: send 0 4") != -1); Assert.True(actual.IndexOf("1: flush") != -1); Assert.True(actual.IndexOf("2: flush") != -1); Assert.True(actual.IndexOf("3: flush") != -1); Assert.True(actual.IndexOf("1: close") != -1); Assert.True(actual.IndexOf("2: close") != -1); Assert.True(actual.IndexOf("3: close") != -1); } [Fact] public void NothingToFlushTest() { var senderFactory = new MySenderFactory(); var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = true; target.Initialize(null); var mre = new ManualResetEvent(false); AsyncContinuation flushContinuation = ex => { mre.Set(); }; target.Flush(flushContinuation); mre.WaitOne(); target.Close(); string expectedLog = @""; Assert.Equal(expectedLog, senderFactory.Log.ToString()); } [Fact] public void 
NetworkTargetMultipleConnectionsWithCacheOverflowTest() { var senderFactory = new MySenderFactory(); var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = true; target.ConnectionCacheSize = 2; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new ManualResetEvent(false); int remaining = 6; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; // logger1 should be kept alive because it's being referenced frequently target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "msg1").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger2", "msg2").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "msg3").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger3", "msg1").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "msg2").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger2", "msg3").WithContinuation(asyncContinuation)); mre.WaitOne(); foreach (var ex in exceptions) { if (ex != null) { Assert.True(false, ex.ToString()); } } target.Close(); string result = senderFactory.Log.ToString(); Assert.True(result.IndexOf("1: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("1: send 0 4") != -1); Assert.True(result.IndexOf("2: connect tcp://logger2.company.lan/") != -1); Assert.True(result.IndexOf("2: send 0 4") != -1); Assert.True(result.IndexOf("1: send 0 4") != -1); Assert.True(result.IndexOf("2: close") != -1); Assert.True(result.IndexOf("3: connect tcp://logger3.company.lan/") != -1); Assert.True(result.IndexOf("3: send 0 4") != -1); Assert.True(result.IndexOf("1: send 0 4") != -1); Assert.True(result.IndexOf("3: close") != -1); Assert.True(result.IndexOf("4: connect tcp://logger2.company.lan/") != -1); Assert.True(result.IndexOf("4: send 0 4") != -1); Assert.True(result.IndexOf("1: close") != -1); Assert.True(result.IndexOf("4: close") != -1); } [Fact] public void NetworkTargetMultipleConnectionsWithoutKeepAliveTest() { var senderFactory = new MySenderFactory(); var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = false; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new ManualResetEvent(false); int remaining = 6; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "msg1").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger2", "msg2").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "msg3").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger3", "msg1").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "msg2").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger2", "msg3").WithContinuation(asyncContinuation)); mre.WaitOne(); foreach 
(var ex in exceptions) { if (ex != null) { Assert.True(false, ex.ToString()); } } target.Close(); string result = senderFactory.Log.ToString(); Assert.True(result.IndexOf("1: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("1: send 0 4") != -1); Assert.True(result.IndexOf("1: close") != -1); Assert.True(result.IndexOf("2: connect tcp://logger2.company.lan/") != -1); Assert.True(result.IndexOf("2: send 0 4") != -1); Assert.True(result.IndexOf("2: close") != -1); Assert.True(result.IndexOf("3: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("3: send 0 4") != -1); Assert.True(result.IndexOf("3: close") != -1); Assert.True(result.IndexOf("4: connect tcp://logger3.company.lan/") != -1); Assert.True(result.IndexOf("4: send 0 4") != -1); Assert.True(result.IndexOf("4: close") != -1); Assert.True(result.IndexOf("5: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("5: send 0 4") != -1); Assert.True(result.IndexOf("5: close") != -1); Assert.True(result.IndexOf("6: connect tcp://logger2.company.lan/") != -1); Assert.True(result.IndexOf("6: send 0 4") != -1); Assert.True(result.IndexOf("6: close") != -1); } [Fact] public void NetworkTargetMultipleConnectionsWithMessageSplitTest() { var senderFactory = new MySenderFactory(); var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = true; target.MaxMessageSize = 9; target.OnOverflow = NetworkTargetOverflowAction.Split; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new ManualResetEvent(false); int remaining = 3; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "012345678901234567890123456789").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "012345678901234").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger2", "012345678901234567890123").WithContinuation(asyncContinuation)); mre.WaitOne(); foreach (var ex in exceptions) { if (ex != null) { Assert.True(false, ex.ToString()); } } target.Close(); var result = senderFactory.Log.ToString(); Assert.True(result.IndexOf("1: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("1: send 0 9") != -1); Assert.True(result.IndexOf("1: send 9 9") != -1); Assert.True(result.IndexOf("1: send 18 9") != -1); Assert.True(result.IndexOf("1: send 27 3") != -1); Assert.True(result.IndexOf("1: send 0 9") != -1); Assert.True(result.IndexOf("1: send 9 6") != -1); Assert.True(result.IndexOf("2: connect tcp://logger2.company.lan/") != -1); Assert.True(result.IndexOf("2: send 0 9") != -1); Assert.True(result.IndexOf("2: send 9 9") != -1); Assert.True(result.IndexOf("2: send 18 6") != -1); Assert.True(result.IndexOf("1: close") != -1); Assert.True(result.IndexOf("2: close") != -1); } [Fact] public void NetworkTargetMultipleConnectionsWithMessageDiscardTest() { var senderFactory = new MySenderFactory(); var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = true; target.MaxMessageSize = 10; target.OnOverflow = NetworkTargetOverflowAction.Discard; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new 
ManualResetEvent(false); int remaining = 3; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "012345678901234").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger2", "01234").WithContinuation(asyncContinuation)); mre.WaitOne(); foreach (var ex in exceptions) { if (ex != null) { Assert.True(false, ex.ToString()); } } target.Close(); string result = senderFactory.Log.ToString(); Assert.True(result.IndexOf("1: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("1: send 0 7") != -1); Assert.True(result.IndexOf("2: connect tcp://logger2.company.lan/") != -1); Assert.True(result.IndexOf("2: send 0 5") != -1); Assert.True(result.IndexOf("1: close") != -1); Assert.True(result.IndexOf("2: close") != -1); } [Fact] public void NetworkTargetMultipleConnectionsWithMessageErrorTest() { var senderFactory = new MySenderFactory(); var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = true; target.MaxMessageSize = 10; target.OnOverflow = NetworkTargetOverflowAction.Error; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new ManualResetEvent(false); int remaining = 3; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "012345678901234").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger2", "01234").WithContinuation(asyncContinuation)); mre.WaitOne(); Assert.Null(exceptions[0]); Assert.NotNull(exceptions[1]); Assert.Equal("Attempted to send a message larger than MaxMessageSize (10). Actual size was: 15. 
Adjust OnOverflow and MaxMessageSize parameters accordingly.", exceptions[1].Message); Assert.Null(exceptions[2]); target.Close(); string result = senderFactory.Log.ToString(); Assert.True(result.IndexOf("1: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("1: send 0 7") != -1); Assert.True(result.IndexOf("1: close") != -1); Assert.True(result.IndexOf("2: connect tcp://logger2.company.lan/") != -1); Assert.True(result.IndexOf("2: send 0 5") != -1); Assert.True(result.IndexOf("2: close") != -1); } [Fact] public void NetworkTargetSendFailureTests() { var senderFactory = new MySenderFactory() { FailCounter = 3, // first 3 sends will fail }; var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = true; target.OnOverflow = NetworkTargetOverflowAction.Discard; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new ManualResetEvent(false); int remaining = 5; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "01234").WithContinuation(asyncContinuation)); mre.WaitOne(); Assert.NotNull(exceptions[0]); Assert.NotNull(exceptions[1]); Assert.NotNull(exceptions[2]); Assert.Null(exceptions[3]); Assert.Null(exceptions[4]); target.Close(); var result = senderFactory.Log.ToString(); Assert.True(result.IndexOf("1: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("1: send 0 7") != -1); Assert.True(result.IndexOf("1: failed") != -1); Assert.True(result.IndexOf("1: close") != -1); Assert.True(result.IndexOf("2: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("2: send 0 7") != -1); Assert.True(result.IndexOf("2: failed") != -1); Assert.True(result.IndexOf("2: close") != -1); Assert.True(result.IndexOf("3: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("3: send 0 7") != -1); Assert.True(result.IndexOf("3: failed") != -1); Assert.True(result.IndexOf("3: close") != -1); Assert.True(result.IndexOf("4: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("4: send 0 7") != -1); Assert.True(result.IndexOf("4: send 0 5") != -1); Assert.True(result.IndexOf("4: close") != -1); } [Fact] public void NetworkTargetTcpTest() { NetworkTarget target; target = new NetworkTarget() { Address = "tcp://127.0.0.1:3004", Layout = "${message}\n", KeepConnection = true, }; string expectedResult = string.Empty; using (var listener = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { Exception receiveException = null; var resultStream = new MemoryStream(); var receiveFinished = new ManualResetEvent(false); listener.Bind(new IPEndPoint(IPAddress.Loopback, 3004)); listener.Listen(10); listener.BeginAccept( result => { try { // Console.WriteLine("Accepting..."); byte[] buffer = new byte[4096]; using (Socket connectedSocket = listener.EndAccept(result)) { 
// Console.WriteLine("Accepted..."); int got; while ((got = connectedSocket.Receive(buffer, 0, buffer.Length, SocketFlags.None)) > 0) { resultStream.Write(buffer, 0, got); } // Console.WriteLine("Closing connection..."); } } catch (Exception ex) { Console.WriteLine("Receive exception {0}", ex); receiveException = ex; } finally { receiveFinished.Set(); } }, null); target.Initialize(new LoggingConfiguration()); int pendingWrites = 100; var writeCompleted = new ManualResetEvent(false); var exceptions = new List<Exception>(); AsyncContinuation writeFinished = ex => { lock (exceptions) { // Console.WriteLine("{0} Write finished {1}", pendingWrites, ex); exceptions.Add(ex); pendingWrites--; if (pendingWrites == 0) { writeCompleted.Set(); } } }; int toWrite = pendingWrites; for (int i = 0; i < toWrite; ++i) { var ev = new LogEventInfo(LogLevel.Info, "logger1", "messagemessagemessagemessagemessage" + i).WithContinuation(writeFinished); target.WriteAsyncLogEvent(ev); expectedResult += "messagemessagemessagemessagemessage" + i + "\n"; } Assert.True(writeCompleted.WaitOne(10000, false), "Writes did not complete"); target.Close(); Assert.True(receiveFinished.WaitOne(10000, false), "Receive did not complete"); string resultString = Encoding.UTF8.GetString(resultStream.GetBuffer(), 0, (int)resultStream.Length); Assert.Null(receiveException); Assert.Equal(expectedResult, resultString); } } [Fact] public void NetworkTargetUdpTest() { var target = new NetworkTarget() { Address = "udp://127.0.0.1:3002", Layout = "${message}\n", KeepConnection = true, }; string expectedResult = string.Empty; using (var listener = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { Exception receiveException = null; var receivedMessages = new List<string>(); var receiveFinished = new ManualResetEvent(false); byte[] receiveBuffer = new byte[4096]; listener.Bind(new IPEndPoint(IPAddress.Loopback, 3002)); EndPoint remoteEndPoint = null; AsyncCallback receivedDatagram = null; receivedDatagram = result => { try { int got = listener.EndReceiveFrom(result, ref remoteEndPoint); string message = Encoding.UTF8.GetString(receiveBuffer, 0, got); lock (receivedMessages) { receivedMessages.Add(message); if (receivedMessages.Count == 100) { receiveFinished.Set(); } } remoteEndPoint = new IPEndPoint(IPAddress.Any, 0); listener.BeginReceiveFrom(receiveBuffer, 0, receiveBuffer.Length, SocketFlags.None, ref remoteEndPoint, receivedDatagram, null); } catch (Exception ex) { receiveException = ex; } }; remoteEndPoint = new IPEndPoint(IPAddress.Any, 0); listener.BeginReceiveFrom(receiveBuffer, 0, receiveBuffer.Length, SocketFlags.None, ref remoteEndPoint, receivedDatagram, null); target.Initialize(new LoggingConfiguration()); int pendingWrites = 100; var writeCompleted = new ManualResetEvent(false); var exceptions = new List<Exception>(); AsyncContinuation writeFinished = ex => { lock (exceptions) { exceptions.Add(ex); pendingWrites--; if (pendingWrites == 0) { writeCompleted.Set(); } } }; int toWrite = pendingWrites; for (int i = 0; i < toWrite; ++i) { var ev = new LogEventInfo(LogLevel.Info, "logger1", "message" + i).WithContinuation(writeFinished); target.WriteAsyncLogEvent(ev); expectedResult += "message" + i + "\n"; } Assert.True(writeCompleted.WaitOne(10000, false)); target.Close(); Assert.True(receiveFinished.WaitOne(10000, false)); Assert.Equal(toWrite, receivedMessages.Count); for (int i = 0; i < toWrite; ++i) { Assert.True(receivedMessages.Contains("message" + i + "\n"), "Message #" + i + " not received."); } 
Assert.Null(receiveException); } } [Fact] public void NetworkTargetNotConnectedTest() { var target = new NetworkTarget() { Address = "tcp4://127.0.0.1:33415", Layout = "${message}\n", KeepConnection = true, }; target.Initialize(new LoggingConfiguration()); int toWrite = 10; int pendingWrites = toWrite; var writeCompleted = new ManualResetEvent(false); var exceptions = new List<Exception>(); AsyncContinuation writeFinished = ex => { lock (exceptions) { exceptions.Add(ex); pendingWrites--; if (pendingWrites == 0) { writeCompleted.Set(); } } }; for (int i = 0; i < toWrite; ++i) { var ev = new LogEventInfo(LogLevel.Info, "logger1", "message" + i).WithContinuation(writeFinished); target.WriteAsyncLogEvent(ev); } writeCompleted.WaitOne(); // no exception target.Close(); Assert.Equal(toWrite, exceptions.Count); foreach (var ex in exceptions) { Assert.NotNull(ex); } } [Fact] public void NetworkTargetSendFailureWithoutKeepAliveTests() { var senderFactory = new MySenderFactory() { FailCounter = 3, // first 3 sends will fail }; var target = new NetworkTarget(); target.Address = "tcp://${logger}.company.lan/"; target.SenderFactory = senderFactory; target.Layout = "${message}"; target.KeepConnection = false; target.OnOverflow = NetworkTargetOverflowAction.Discard; target.Initialize(null); var exceptions = new List<Exception>(); var mre = new ManualResetEvent(false); int remaining = 5; AsyncContinuation asyncContinuation = ex => { lock (exceptions) { exceptions.Add(ex); if (--remaining == 0) { mre.Set(); } } }; target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "0123456").WithContinuation(asyncContinuation)); target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "01234").WithContinuation(asyncContinuation)); mre.WaitOne(); Assert.NotNull(exceptions[0]); Assert.NotNull(exceptions[1]); Assert.NotNull(exceptions[2]); Assert.Null(exceptions[3]); Assert.Null(exceptions[4]); target.Close(); var result = senderFactory.Log.ToString(); Assert.True(result.IndexOf("1: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("1: send 0 7") != -1); Assert.True(result.IndexOf("1: failed") != -1); Assert.True(result.IndexOf("1: close") != -1); Assert.True(result.IndexOf("2: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("2: send 0 7") != -1); Assert.True(result.IndexOf("2: failed") != -1); Assert.True(result.IndexOf("2: close") != -1); Assert.True(result.IndexOf("3: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("3: send 0 7") != -1); Assert.True(result.IndexOf("3: failed") != -1); Assert.True(result.IndexOf("3: close") != -1); Assert.True(result.IndexOf("4: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("4: send 0 7") != -1); Assert.True(result.IndexOf("4: close") != -1); Assert.True(result.IndexOf("5: connect tcp://logger1.company.lan/") != -1); Assert.True(result.IndexOf("5: send 0 5") != -1); Assert.True(result.IndexOf("5: close") != -1); } internal class MySenderFactory : INetworkSenderFactory { internal List<MyNetworkSender> Senders = new List<MyNetworkSender>(); internal StringWriter Log = new StringWriter(); private int idCounter; public 
NetworkSender Create(string url, int maximumQueueSize) { var sender = new MyNetworkSender(url, ++this.idCounter, this.Log, this); this.Senders.Add(sender); return sender; } public int FailCounter { get; set; } } internal class MyNetworkSender : NetworkSender { private readonly int id; private readonly TextWriter log; private readonly MySenderFactory senderFactory; internal MemoryStream MemoryStream { get; set; } public MyNetworkSender(string url, int id, TextWriter log, MySenderFactory senderFactory) : base(url) { this.id = id; this.log = log; this.senderFactory = senderFactory; this.MemoryStream = new MemoryStream(); } protected override void DoInitialize() { base.DoInitialize(); this.log.WriteLine("{0}: connect {1}", this.id, this.Address); } protected override void DoFlush(AsyncContinuation continuation) { this.log.WriteLine("{0}: flush", this.id); continuation(null); } protected override void DoClose(AsyncContinuation continuation) { this.log.WriteLine("{0}: close", this.id); continuation(null); } protected override void DoSend(byte[] bytes, int offset, int length, AsyncContinuation asyncContinuation) { this.log.WriteLine("{0}: send {1} {2}", this.id, offset, length); this.MemoryStream.Write(bytes, offset, length); if (this.senderFactory.FailCounter > 0) { this.log.WriteLine("{0}: failed", this.id); this.senderFactory.FailCounter--; asyncContinuation(new IOException("some IO error has occurred")); } else { asyncContinuation(null); } } } } }
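// --------------------------------------------------------------------------------------------
// Illustrative sketch, not part of the original NLog test file above: a minimal test in the same
// style, showing the pattern the tests rely on -- a NetworkTarget wired to the
// MySenderFactory/MyNetworkSender test doubles, a single WriteAsyncLogEvent call, and assertions
// against the textual log the fake sender records ("connect", "send <offset> <length>", "close").
// The method name is hypothetical and the test would sit inside the test class above; the types,
// properties and expected log lines are taken from the existing tests. With Layout "${message}",
// "msg1" renders to 4 bytes, hence "1: send 0 4".
// --------------------------------------------------------------------------------------------
[Fact]
public void NetworkTargetSingleWriteSketch()
{
    var senderFactory = new MySenderFactory();
    var target = new NetworkTarget();
    target.Address = "tcp://${logger}.company.lan/";
    target.SenderFactory = senderFactory;
    target.Layout = "${message}";
    target.KeepConnection = true;
    target.Initialize(null);

    Exception writeException = null;
    var mre = new ManualResetEvent(false);
    AsyncContinuation continuation = ex => { writeException = ex; mre.Set(); };

    target.WriteAsyncLogEvent(new LogEventInfo(LogLevel.Info, "logger1", "msg1").WithContinuation(continuation));
    mre.WaitOne();
    Assert.Null(writeException);

    target.Close();
    string result = senderFactory.Log.ToString();
    Assert.True(result.IndexOf("1: connect tcp://logger1.company.lan/") != -1);
    Assert.True(result.IndexOf("1: send 0 4") != -1);
    Assert.True(result.IndexOf("1: close") != -1);
}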
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gagvr = Google.Ads.GoogleAds.V9.Resources; using gax = Google.Api.Gax; using gaxgrpc = Google.Api.Gax.Grpc; using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore; using proto = Google.Protobuf; using grpccore = Grpc.Core; using grpcinter = Grpc.Core.Interceptors; using sys = System; using scg = System.Collections.Generic; using sco = System.Collections.ObjectModel; using st = System.Threading; using stt = System.Threading.Tasks; namespace Google.Ads.GoogleAds.V9.Services { /// <summary>Settings for <see cref="LandingPageViewServiceClient"/> instances.</summary> public sealed partial class LandingPageViewServiceSettings : gaxgrpc::ServiceSettingsBase { /// <summary>Get a new instance of the default <see cref="LandingPageViewServiceSettings"/>.</summary> /// <returns>A new instance of the default <see cref="LandingPageViewServiceSettings"/>.</returns> public static LandingPageViewServiceSettings GetDefault() => new LandingPageViewServiceSettings(); /// <summary> /// Constructs a new <see cref="LandingPageViewServiceSettings"/> object with default settings. /// </summary> public LandingPageViewServiceSettings() { } private LandingPageViewServiceSettings(LandingPageViewServiceSettings existing) : base(existing) { gax::GaxPreconditions.CheckNotNull(existing, nameof(existing)); GetLandingPageViewSettings = existing.GetLandingPageViewSettings; OnCopy(existing); } partial void OnCopy(LandingPageViewServiceSettings existing); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>LandingPageViewServiceClient.GetLandingPageView</c> and /// <c>LandingPageViewServiceClient.GetLandingPageViewAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 5000 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>, /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>. 
/// </description> /// </item> /// <item><description>Timeout: 3600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings GetLandingPageViewSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded))); /// <summary>Creates a deep clone of this object, with all the same property values.</summary> /// <returns>A deep clone of this <see cref="LandingPageViewServiceSettings"/> object.</returns> public LandingPageViewServiceSettings Clone() => new LandingPageViewServiceSettings(this); } /// <summary> /// Builder class for <see cref="LandingPageViewServiceClient"/> to provide simple configuration of credentials, /// endpoint etc. /// </summary> internal sealed partial class LandingPageViewServiceClientBuilder : gaxgrpc::ClientBuilderBase<LandingPageViewServiceClient> { /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary> public LandingPageViewServiceSettings Settings { get; set; } /// <summary>Creates a new builder with default settings.</summary> public LandingPageViewServiceClientBuilder() { UseJwtAccessWithScopes = LandingPageViewServiceClient.UseJwtAccessWithScopes; } partial void InterceptBuild(ref LandingPageViewServiceClient client); partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<LandingPageViewServiceClient> task); /// <summary>Builds the resulting client.</summary> public override LandingPageViewServiceClient Build() { LandingPageViewServiceClient client = null; InterceptBuild(ref client); return client ?? BuildImpl(); } /// <summary>Builds the resulting client asynchronously.</summary> public override stt::Task<LandingPageViewServiceClient> BuildAsync(st::CancellationToken cancellationToken = default) { stt::Task<LandingPageViewServiceClient> task = null; InterceptBuildAsync(cancellationToken, ref task); return task ?? BuildAsyncImpl(cancellationToken); } private LandingPageViewServiceClient BuildImpl() { Validate(); grpccore::CallInvoker callInvoker = CreateCallInvoker(); return LandingPageViewServiceClient.Create(callInvoker, Settings); } private async stt::Task<LandingPageViewServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken) { Validate(); grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false); return LandingPageViewServiceClient.Create(callInvoker, Settings); } /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary> protected override string GetDefaultEndpoint() => LandingPageViewServiceClient.DefaultEndpoint; /// <summary> /// Returns the default scopes for this builder type, used if no scopes are otherwise specified. 
/// </summary> protected override scg::IReadOnlyList<string> GetDefaultScopes() => LandingPageViewServiceClient.DefaultScopes; /// <summary>Returns the channel pool to use when no other options are specified.</summary> protected override gaxgrpc::ChannelPool GetChannelPool() => LandingPageViewServiceClient.ChannelPool; /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary> protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance; } /// <summary>LandingPageViewService client wrapper, for convenient use.</summary> /// <remarks> /// Service to fetch landing page views. /// </remarks> public abstract partial class LandingPageViewServiceClient { /// <summary> /// The default endpoint for the LandingPageViewService service, which is a host of "googleads.googleapis.com" /// and a port of 443. /// </summary> public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443"; /// <summary>The default LandingPageViewService scopes.</summary> /// <remarks> /// The default LandingPageViewService scopes are: /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list> /// </remarks> public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] { "https://www.googleapis.com/auth/adwords", }); internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes); internal static bool UseJwtAccessWithScopes { get { bool useJwtAccessWithScopes = true; MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes); return useJwtAccessWithScopes; } } static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes); /// <summary> /// Asynchronously creates a <see cref="LandingPageViewServiceClient"/> using the default credentials, endpoint /// and settings. To specify custom credentials or other settings, use /// <see cref="LandingPageViewServiceClientBuilder"/>. /// </summary> /// <param name="cancellationToken"> /// The <see cref="st::CancellationToken"/> to use while creating the client. /// </param> /// <returns>The task representing the created <see cref="LandingPageViewServiceClient"/>.</returns> public static stt::Task<LandingPageViewServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) => new LandingPageViewServiceClientBuilder().BuildAsync(cancellationToken); /// <summary> /// Synchronously creates a <see cref="LandingPageViewServiceClient"/> using the default credentials, endpoint /// and settings. To specify custom credentials or other settings, use /// <see cref="LandingPageViewServiceClientBuilder"/>. /// </summary> /// <returns>The created <see cref="LandingPageViewServiceClient"/>.</returns> public static LandingPageViewServiceClient Create() => new LandingPageViewServiceClientBuilder().Build(); /// <summary> /// Creates a <see cref="LandingPageViewServiceClient"/> which uses the specified call invoker for remote /// operations. /// </summary> /// <param name="callInvoker"> /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null. 
/// </param> /// <param name="settings">Optional <see cref="LandingPageViewServiceSettings"/>.</param> /// <returns>The created <see cref="LandingPageViewServiceClient"/>.</returns> internal static LandingPageViewServiceClient Create(grpccore::CallInvoker callInvoker, LandingPageViewServiceSettings settings = null) { gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker)); grpcinter::Interceptor interceptor = settings?.Interceptor; if (interceptor != null) { callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor); } LandingPageViewService.LandingPageViewServiceClient grpcClient = new LandingPageViewService.LandingPageViewServiceClient(callInvoker); return new LandingPageViewServiceClientImpl(grpcClient, settings); } /// <summary> /// Shuts down any channels automatically created by <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not /// affected. /// </summary> /// <remarks> /// After calling this method, further calls to <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down /// by another call to this method. /// </remarks> /// <returns>A task representing the asynchronous shutdown operation.</returns> public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync(); /// <summary>The underlying gRPC LandingPageViewService client</summary> public virtual LandingPageViewService.LandingPageViewServiceClient GrpcClient => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::LandingPageView GetLandingPageView(GetLandingPageViewRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::LandingPageView> GetLandingPageViewAsync(GetLandingPageViewRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested landing page view in full detail. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::LandingPageView> GetLandingPageViewAsync(GetLandingPageViewRequest request, st::CancellationToken cancellationToken) => GetLandingPageViewAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the landing page view to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::LandingPageView GetLandingPageView(string resourceName, gaxgrpc::CallSettings callSettings = null) => GetLandingPageView(new GetLandingPageViewRequest { ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the landing page view to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::LandingPageView> GetLandingPageViewAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) => GetLandingPageViewAsync(new GetLandingPageViewRequest { ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the landing page view to fetch. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::LandingPageView> GetLandingPageViewAsync(string resourceName, st::CancellationToken cancellationToken) => GetLandingPageViewAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the landing page view to fetch. 
/// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::LandingPageView GetLandingPageView(gagvr::LandingPageViewName resourceName, gaxgrpc::CallSettings callSettings = null) => GetLandingPageView(new GetLandingPageViewRequest { ResourceNameAsLandingPageViewName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the landing page view to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::LandingPageView> GetLandingPageViewAsync(gagvr::LandingPageViewName resourceName, gaxgrpc::CallSettings callSettings = null) => GetLandingPageViewAsync(new GetLandingPageViewRequest { ResourceNameAsLandingPageViewName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the landing page view to fetch. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::LandingPageView> GetLandingPageViewAsync(gagvr::LandingPageViewName resourceName, st::CancellationToken cancellationToken) => GetLandingPageViewAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); } /// <summary>LandingPageViewService client wrapper implementation, for convenient use.</summary> /// <remarks> /// Service to fetch landing page views. /// </remarks> public sealed partial class LandingPageViewServiceClientImpl : LandingPageViewServiceClient { private readonly gaxgrpc::ApiCall<GetLandingPageViewRequest, gagvr::LandingPageView> _callGetLandingPageView; /// <summary> /// Constructs a client wrapper for the LandingPageViewService service, with the specified gRPC client and /// settings. /// </summary> /// <param name="grpcClient">The underlying gRPC client.</param> /// <param name="settings"> /// The base <see cref="LandingPageViewServiceSettings"/> used within this client. /// </param> public LandingPageViewServiceClientImpl(LandingPageViewService.LandingPageViewServiceClient grpcClient, LandingPageViewServiceSettings settings) { GrpcClient = grpcClient; LandingPageViewServiceSettings effectiveSettings = settings ?? 
LandingPageViewServiceSettings.GetDefault(); gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings); _callGetLandingPageView = clientHelper.BuildApiCall<GetLandingPageViewRequest, gagvr::LandingPageView>(grpcClient.GetLandingPageViewAsync, grpcClient.GetLandingPageView, effectiveSettings.GetLandingPageViewSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName); Modify_ApiCall(ref _callGetLandingPageView); Modify_GetLandingPageViewApiCall(ref _callGetLandingPageView); OnConstruction(grpcClient, effectiveSettings, clientHelper); } partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>; partial void Modify_GetLandingPageViewApiCall(ref gaxgrpc::ApiCall<GetLandingPageViewRequest, gagvr::LandingPageView> call); partial void OnConstruction(LandingPageViewService.LandingPageViewServiceClient grpcClient, LandingPageViewServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper); /// <summary>The underlying gRPC LandingPageViewService client</summary> public override LandingPageViewService.LandingPageViewServiceClient GrpcClient { get; } partial void Modify_GetLandingPageViewRequest(ref GetLandingPageViewRequest request, ref gaxgrpc::CallSettings settings); /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override gagvr::LandingPageView GetLandingPageView(GetLandingPageViewRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_GetLandingPageViewRequest(ref request, ref callSettings); return _callGetLandingPageView.Sync(request, callSettings); } /// <summary> /// Returns the requested landing page view in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<gagvr::LandingPageView> GetLandingPageViewAsync(GetLandingPageViewRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_GetLandingPageViewRequest(ref request, ref callSettings); return _callGetLandingPageView.Async(request, callSettings); } } }
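// --------------------------------------------------------------------------------------------
// Illustrative usage sketch, not part of the generated file above: creating a
// LandingPageViewServiceClient with the builder defaults and fetching one landing page view by
// resource name, using only members defined above (Create() and GetLandingPageView(string,
// CallSettings)). The customer ID and landing page view ID in the resource name are
// hypothetical, and a real Google Ads call additionally needs developer-token and
// login-customer-id headers, which this minimal sketch omits.
// --------------------------------------------------------------------------------------------
using Google.Ads.GoogleAds.V9.Services;

internal static class LandingPageViewServiceClientSnippet
{
    internal static void GetLandingPageViewExample()
    {
        // Uses default credentials, endpoint and settings (see LandingPageViewServiceClientBuilder).
        LandingPageViewServiceClient client = LandingPageViewServiceClient.Create();

        // Hypothetical resource name of the form customers/{customer_id}/landingPageViews/{id}.
        var landingPageView = client.GetLandingPageView("customers/1234567890/landingPageViews/9876543210");

        System.Console.WriteLine(landingPageView);
    }
}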
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using Xunit; namespace System.Reflection.Tests { public class GenericGetDefaultMembersTest { public static void TryGetDefaultMembers(string AssemblyQualifiedNameOfTypeToGet, string[] expectedDefaultMembers) { Type typeToCheck; //Run tests typeToCheck = Type.GetType(AssemblyQualifiedNameOfTypeToGet); Assert.NotNull(typeToCheck); MemberInfo[] defaultMembersReturned = typeToCheck.GetDefaultMembers(); Assert.Equal(defaultMembersReturned.Length, expectedDefaultMembers.Length); int foundIndex; Array.Sort(expectedDefaultMembers); for (int i = 0; i < defaultMembersReturned.Length; i++) { foundIndex = Array.BinarySearch(expectedDefaultMembers, defaultMembersReturned[i].ToString()); Assert.False(foundIndex < 0, "An unexpected member " + defaultMembersReturned[i].ToString() + " was returned"); } } public static string ArrayToCommaList(string[] ArrayToConvert) { string returnString = ""; if (ArrayToConvert.Length > 0) { returnString = ArrayToConvert[0]; for (int i = 1; i < ArrayToConvert.Length; i++) { returnString += ", " + ArrayToConvert[i]; } } return returnString; } public static string ArrayToCommaList(MemberInfo[] ArrayToConvert) { string returnString = ""; if (ArrayToConvert.Length > 0) { returnString = ArrayToConvert[0].ToString(); for (int i = 1; i < ArrayToConvert.Length; i++) { returnString += ", " + ArrayToConvert[i].ToString(); } } return returnString; } [Fact] public void Test1() { TryGetDefaultMembers("System.Reflection.Tests.GenericArrayWrapperClass`1[System.String]", new string[] { "System.String Item [Int32]" }); } [Fact] public void Test2() { TryGetDefaultMembers("System.Reflection.Tests.GenericArrayWrapperClass`1", new string[] { "T Item [Int32]" }); } [Fact] public void Test3() { //Test003 TryGetDefaultMembers("System.Reflection.Tests.GenericClass`1", new string[] { "T ReturnAndSetField(T)" }); } [Fact] public void Test4() { //Test004 TryGetDefaultMembers("System.Reflection.Tests.GenericClass`1[System.Int32]", new string[] { "Int32 ReturnAndSetField(Int32)" }); } } // build warnings about unused fields are not applicable to // reflection test cases #pragma warning disable 0169 #pragma warning disable 0067 #region Generics helper classes public class NonGenericClassString { public T method1<T, M>(T p) { return p; } public void method2(int p) { return; } } [DefaultMember("ReturnAndSetField")] public class GenericClass<T> { public T field; public GenericClass(T a) { field = a; } public T ReturnAndSetField(T newFieldValue) { field = newFieldValue; return field; } } public struct GenericStruct<T> { public T field; public T ReturnAndSetField(T newFieldValue) { field = newFieldValue; return field; } } public class GenericClass2TP<T, W> { public T field; public W field2; public T ReturnAndSetField1(T newFieldValue) { field = newFieldValue; return field; } public W ReturnAndSetField2(W newFieldValue) { field2 = newFieldValue; return field2; } } public struct GenericStruct2TP<T, W> { public T field; public W field2; public T ReturnAndSetField1(T newFieldValue) { field = newFieldValue; return field; } public W ReturnAndSetField2(W newFieldValue) { field2 = newFieldValue; return field2; } } public class GenericClassWithInterface<T> : IGenericInterface<T> { public T field; public GenericClassWithInterface(T a) { field = a; } public T ReturnAndSetFieldZero(T 
newFieldValue) { field = newFieldValue; return field; } public W GenericMethod<W>(W a) { return a; } } public class NonGenericClassWithGenericInterface : IGenericInterface<int> { public int field; public int ReturnAndSetFieldZero(int newFieldValue) { field = newFieldValue; return field; } } public struct GenericStructWithInterface<T> : IGenericInterface<T> { public T field; public int field2; public GenericStructWithInterface(T a) { field = a; field2 = 0; } public GenericStructWithInterface(T a, int b) { field = a; field2 = b; } public T ReturnAndSetFieldZero(T newFieldValue) { field = newFieldValue; return field; } } public interface NonGenericInterface { void SayHello(); } public interface IGenericInterface<T> { T ReturnAndSetFieldZero(T newFieldValue); } public interface IGenericInterface2<T, W> { void SetFieldOne(T newFieldValue); void SetFieldTwo(W newFieldValue); } public interface IGenericInterfaceInherits<U, V> : IGenericInterface<U>, IGenericInterface2<V, U> { V ReturnAndSetFieldThree(V newFieldValue); } public class GenericClassUsingNestedInterfaces<X, Y> : IGenericInterfaceInherits<X, Y> { public X FieldZero; public X FieldOne; public Y FieldTwo; public Y FieldThree; public GenericClassUsingNestedInterfaces(X a, X b, Y c, Y d) { FieldZero = a; FieldOne = b; FieldTwo = c; FieldThree = d; } public X ReturnAndSetFieldZero(X newFieldValue) { FieldZero = newFieldValue; return FieldZero; } public void SetFieldOne(Y newFieldValue) { FieldTwo = newFieldValue; } public void SetFieldTwo(X newFieldValue) { FieldOne = newFieldValue; } public Y ReturnAndSetFieldThree(Y newFieldValue) { FieldThree = newFieldValue; return FieldThree; } } public class GenericClassWithVarArgMethod<T> { public T field; public T publicField { get { return field; } set { field = value; } } public T ReturnAndSetField(T newFieldValue, params T[] moreFieldValues) { field = newFieldValue; for (int i = 0; i <= moreFieldValues.Length - 1; i++) { field = moreFieldValues[i]; } return field; } } public class ClassWithVarArgMethod { public int field; public int publicField { get { return field; } set { field = value; } } public int ReturnAndSetField(int newFieldValue, params int[] moreFieldValues) { field = newFieldValue; for (int i = 0; i <= moreFieldValues.Length - 1; i++) { field = moreFieldValues[i]; } return field; } } public class NonGenericClassWithVarArgGenericMethod { public T ReturnAndSetField<T>(T newFieldValue, params T[] moreFieldValues) { T field; field = newFieldValue; for (int i = 0; i <= moreFieldValues.Length - 1; i++) { field = moreFieldValues[i]; } return field; } } public interface IConsume { object[] StuffConsumed { get; } void Eat(object ThingEaten); object[] Puke(int Amount); } public class PackOfCarnivores<T> where T : IConsume { public T[] pPack; } public class Cat<C> : IConsume { private List<object> _pStuffConsumed = new List<object>(); public event EventHandler WeightChanged; private event EventHandler WeightStayedTheSame; private static EventHandler s_catDisappeared; public object[] StuffConsumed { get { return _pStuffConsumed.ToArray(); } } public void Eat(object ThingEaten) { _pStuffConsumed.Add(ThingEaten); } public object[] Puke(int Amount) { object[] vomit; if (_pStuffConsumed.Count < Amount) { Amount = _pStuffConsumed.Count; } vomit = _pStuffConsumed.GetRange(_pStuffConsumed.Count - Amount, Amount).ToArray(); _pStuffConsumed.RemoveRange(_pStuffConsumed.Count - Amount, Amount); return vomit; } } public class GenericArrayWrapperClass<T> { private T[] _field; private int _field1; public int 
myProperty { get { return 0; } set { _field1 = value; } } public GenericArrayWrapperClass(T[] fieldValues) { int size = fieldValues.Length; _field = new T[size]; for (int i = 0; i < _field.Length; i++) { _field[i] = fieldValues[i]; } } public T this[int index] { get { return _field[index]; } set { _field[index] = value; } } } public class GenericOuterClass<T> { public T field; public class GenericNestedClass<W> { public T field; public W field2; } public class NestedClass { public T field; public int field2; } } public class OuterClass { public int field; public class GenericNestedClass<W> { public int field; public W field2; } public class NestedClass { public string field; public int field2; } } #endregion #pragma warning restore 0067 #pragma warning restore 0169 }
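// --------------------------------------------------------------------------------------------
// Illustrative sketch, not part of the test file above: how Type.GetDefaultMembers resolves the
// members these tests assert on. GenericArrayWrapperClass<T> gets a compiler-generated
// [DefaultMember("Item")] because it declares an indexer, while GenericClass<T> carries an
// explicit [DefaultMember("ReturnAndSetField")]. The expected output strings match Test1 and
// Test4 above; the class and method names of this sketch are hypothetical.
// --------------------------------------------------------------------------------------------
using System;
using System.Reflection;
using System.Reflection.Tests;

internal static class GetDefaultMembersSketch
{
    internal static void Run()
    {
        // Indexer => compiler-generated default member named "Item".
        foreach (MemberInfo member in typeof(GenericArrayWrapperClass<string>).GetDefaultMembers())
        {
            Console.WriteLine(member); // System.String Item [Int32]
        }

        // Explicit [DefaultMember("ReturnAndSetField")] on the type.
        foreach (MemberInfo member in typeof(GenericClass<int>).GetDefaultMembers())
        {
            Console.WriteLine(member); // Int32 ReturnAndSetField(Int32)
        }
    }
}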
// // Copyright (c) Microsoft Corporation. All rights reserved. // namespace Microsoft.Zelig.Runtime { using System; using System.Runtime.CompilerServices; using TS = Microsoft.Zelig.Runtime.TypeSystem; [ImplicitInstance] [ForceDevirtualization] public abstract unsafe class MemoryManager { public static class Configuration { public static bool TrashFreeMemory { [ConfigurationOption("MemoryManager__TrashFreeMemory")] get { return true; } } } sealed class EmptyManager : MemoryManager { // // Helper Methods // public override void InitializeMemoryManager() { } //--// public override void ZeroFreeMemory() { } public override UIntPtr Allocate( uint size ) { return new UIntPtr( 0 ); } public override void Release( UIntPtr address , uint size ) { } public override bool RefersToMemory( UIntPtr address ) { return true; } } // // State // protected MemorySegment* m_heapHead; protected MemorySegment* m_heapTail; protected MemorySegment* m_active; // // Helper Methods // public virtual void InitializeMemoryManager() { m_heapHead = null; m_heapTail = null; m_active = null; } public virtual void InitializationComplete() { } public virtual void ZeroFreeMemory() { MemorySegment* ptr = m_heapHead; while(ptr != null) { ptr->ZeroFreeMemory(); ptr = ptr->Next; } } public virtual void DirtyFreeMemory() { MemorySegment* ptr = m_heapHead; while(ptr != null) { ptr->DirtyFreeMemory(); ptr = ptr->Next; } } [TS.WellKnownMethod( "MemoryManager_Allocate" )] public abstract UIntPtr Allocate( uint size ); [TS.WellKnownMethod( "MemoryManager_Release" )] public abstract void Release( UIntPtr address , uint size ); public abstract bool RefersToMemory( UIntPtr address ); //--// protected void AddLinearSection( UIntPtr beginning , UIntPtr end , MemoryAttributes attributes ) { uint size = AddressMath.RangeSize( beginning, end ); if(size >= MemorySegment.MinimumSpaceRequired()) { MemorySegment* seg = (MemorySegment*)beginning.ToPointer(); seg->Next = null; seg->Previous = m_heapTail; seg->Beginning = beginning; seg->End = end; seg->Attributes = attributes; if(m_heapHead == null) { m_heapHead = seg; } if(m_heapTail != null) { m_heapTail->Next = seg; } m_heapTail = seg; seg->Initialize(); } } // // Access Methods // public static extern MemoryManager Instance { [SingletonFactory(Fallback=typeof(EmptyManager))] [MethodImpl( MethodImplOptions.InternalCall )] get; } public static extern Synchronization.YieldLock Lock { [SingletonFactory()] [MethodImpl( MethodImplOptions.InternalCall )] get; } public MemorySegment* StartOfHeap { get { return m_heapHead; } } public uint AvailableMemory { get { uint total = 0; for(MemorySegment* heap = m_heapHead; heap != null; heap = heap->Next) { total += heap->AvailableMemory; } return total; } } public uint AllocatedMemory { get { uint total = 0; for(MemorySegment* heap = m_heapHead; heap != null; heap = heap->Next) { total += heap->AllocatedMemory; } return total; } } } }
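// --------------------------------------------------------------------------------------------
// Illustrative sketch, not part of the Zelig runtime file above: a simplified managed analogue
// of the structure MemoryManager maintains -- a singly linked list of memory segments that
// AddLinearSection appends to and that is walked to compute AvailableMemory and to satisfy
// allocations. The Segment and SegmentListSketch types are hypothetical stand-ins; they do not
// model MemorySegment's real unsafe layout or the abstract Allocate/Release contract.
// --------------------------------------------------------------------------------------------
internal sealed class Segment
{
    internal Segment Next;
    internal uint Capacity;
    internal uint Used;
    internal uint Available => Capacity - Used;
}

internal sealed class SegmentListSketch
{
    private Segment _head;
    private Segment _tail;

    // Analogue of AddLinearSection: append a segment covering a linear address range.
    internal void AddLinearSection(uint size)
    {
        var seg = new Segment { Capacity = size };
        if (_head == null) { _head = seg; } else { _tail.Next = seg; }
        _tail = seg;
    }

    // A first-fit walk over the segment list, similar in shape to a concrete Allocate.
    internal bool TryAllocate(uint size)
    {
        for (Segment seg = _head; seg != null; seg = seg.Next)
        {
            if (seg.Available >= size) { seg.Used += size; return true; }
        }
        return false;
    }

    // Same shape as MemoryManager.AvailableMemory: sum the free space of every segment.
    internal uint AvailableMemory
    {
        get
        {
            uint total = 0;
            for (Segment seg = _head; seg != null; seg = seg.Next) { total += seg.Available; }
            return total;
        }
    }
}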
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gagvr = Google.Ads.GoogleAds.V9.Resources; using gax = Google.Api.Gax; using gaxgrpc = Google.Api.Gax.Grpc; using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore; using proto = Google.Protobuf; using grpccore = Grpc.Core; using grpcinter = Grpc.Core.Interceptors; using sys = System; using scg = System.Collections.Generic; using sco = System.Collections.ObjectModel; using st = System.Threading; using stt = System.Threading.Tasks; namespace Google.Ads.GoogleAds.V9.Services { /// <summary>Settings for <see cref="CampaignBidModifierServiceClient"/> instances.</summary> public sealed partial class CampaignBidModifierServiceSettings : gaxgrpc::ServiceSettingsBase { /// <summary>Get a new instance of the default <see cref="CampaignBidModifierServiceSettings"/>.</summary> /// <returns>A new instance of the default <see cref="CampaignBidModifierServiceSettings"/>.</returns> public static CampaignBidModifierServiceSettings GetDefault() => new CampaignBidModifierServiceSettings(); /// <summary> /// Constructs a new <see cref="CampaignBidModifierServiceSettings"/> object with default settings. /// </summary> public CampaignBidModifierServiceSettings() { } private CampaignBidModifierServiceSettings(CampaignBidModifierServiceSettings existing) : base(existing) { gax::GaxPreconditions.CheckNotNull(existing, nameof(existing)); GetCampaignBidModifierSettings = existing.GetCampaignBidModifierSettings; MutateCampaignBidModifiersSettings = existing.MutateCampaignBidModifiersSettings; OnCopy(existing); } partial void OnCopy(CampaignBidModifierServiceSettings existing); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>CampaignBidModifierServiceClient.GetCampaignBidModifier</c> and /// <c>CampaignBidModifierServiceClient.GetCampaignBidModifierAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 5000 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>, /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>. 
/// </description> /// </item> /// <item><description>Timeout: 3600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings GetCampaignBidModifierSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded))); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>CampaignBidModifierServiceClient.MutateCampaignBidModifiers</c> and /// <c>CampaignBidModifierServiceClient.MutateCampaignBidModifiersAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 5000 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>, /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>. /// </description> /// </item> /// <item><description>Timeout: 3600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings MutateCampaignBidModifiersSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded))); /// <summary>Creates a deep clone of this object, with all the same property values.</summary> /// <returns>A deep clone of this <see cref="CampaignBidModifierServiceSettings"/> object.</returns> public CampaignBidModifierServiceSettings Clone() => new CampaignBidModifierServiceSettings(this); } /// <summary> /// Builder class for <see cref="CampaignBidModifierServiceClient"/> to provide simple configuration of credentials, /// endpoint etc. /// </summary> internal sealed partial class CampaignBidModifierServiceClientBuilder : gaxgrpc::ClientBuilderBase<CampaignBidModifierServiceClient> { /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary> public CampaignBidModifierServiceSettings Settings { get; set; } /// <summary>Creates a new builder with default settings.</summary> public CampaignBidModifierServiceClientBuilder() { UseJwtAccessWithScopes = CampaignBidModifierServiceClient.UseJwtAccessWithScopes; } partial void InterceptBuild(ref CampaignBidModifierServiceClient client); partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<CampaignBidModifierServiceClient> task); /// <summary>Builds the resulting client.</summary> public override CampaignBidModifierServiceClient Build() { CampaignBidModifierServiceClient client = null; InterceptBuild(ref client); return client ?? 
BuildImpl(); } /// <summary>Builds the resulting client asynchronously.</summary> public override stt::Task<CampaignBidModifierServiceClient> BuildAsync(st::CancellationToken cancellationToken = default) { stt::Task<CampaignBidModifierServiceClient> task = null; InterceptBuildAsync(cancellationToken, ref task); return task ?? BuildAsyncImpl(cancellationToken); } private CampaignBidModifierServiceClient BuildImpl() { Validate(); grpccore::CallInvoker callInvoker = CreateCallInvoker(); return CampaignBidModifierServiceClient.Create(callInvoker, Settings); } private async stt::Task<CampaignBidModifierServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken) { Validate(); grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false); return CampaignBidModifierServiceClient.Create(callInvoker, Settings); } /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary> protected override string GetDefaultEndpoint() => CampaignBidModifierServiceClient.DefaultEndpoint; /// <summary> /// Returns the default scopes for this builder type, used if no scopes are otherwise specified. /// </summary> protected override scg::IReadOnlyList<string> GetDefaultScopes() => CampaignBidModifierServiceClient.DefaultScopes; /// <summary>Returns the channel pool to use when no other options are specified.</summary> protected override gaxgrpc::ChannelPool GetChannelPool() => CampaignBidModifierServiceClient.ChannelPool; /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary> protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance; } /// <summary>CampaignBidModifierService client wrapper, for convenient use.</summary> /// <remarks> /// Service to manage campaign bid modifiers. /// </remarks> public abstract partial class CampaignBidModifierServiceClient { /// <summary> /// The default endpoint for the CampaignBidModifierService service, which is a host of /// "googleads.googleapis.com" and a port of 443. /// </summary> public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443"; /// <summary>The default CampaignBidModifierService scopes.</summary> /// <remarks> /// The default CampaignBidModifierService scopes are: /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list> /// </remarks> public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] { "https://www.googleapis.com/auth/adwords", }); internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes); internal static bool UseJwtAccessWithScopes { get { bool useJwtAccessWithScopes = true; MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes); return useJwtAccessWithScopes; } } static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes); /// <summary> /// Asynchronously creates a <see cref="CampaignBidModifierServiceClient"/> using the default credentials, /// endpoint and settings. To specify custom credentials or other settings, use /// <see cref="CampaignBidModifierServiceClientBuilder"/>. /// </summary> /// <param name="cancellationToken"> /// The <see cref="st::CancellationToken"/> to use while creating the client. 
/// </param> /// <returns>The task representing the created <see cref="CampaignBidModifierServiceClient"/>.</returns> public static stt::Task<CampaignBidModifierServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) => new CampaignBidModifierServiceClientBuilder().BuildAsync(cancellationToken); /// <summary> /// Synchronously creates a <see cref="CampaignBidModifierServiceClient"/> using the default credentials, /// endpoint and settings. To specify custom credentials or other settings, use /// <see cref="CampaignBidModifierServiceClientBuilder"/>. /// </summary> /// <returns>The created <see cref="CampaignBidModifierServiceClient"/>.</returns> public static CampaignBidModifierServiceClient Create() => new CampaignBidModifierServiceClientBuilder().Build(); /// <summary> /// Creates a <see cref="CampaignBidModifierServiceClient"/> which uses the specified call invoker for remote /// operations. /// </summary> /// <param name="callInvoker"> /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null. /// </param> /// <param name="settings">Optional <see cref="CampaignBidModifierServiceSettings"/>.</param> /// <returns>The created <see cref="CampaignBidModifierServiceClient"/>.</returns> internal static CampaignBidModifierServiceClient Create(grpccore::CallInvoker callInvoker, CampaignBidModifierServiceSettings settings = null) { gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker)); grpcinter::Interceptor interceptor = settings?.Interceptor; if (interceptor != null) { callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor); } CampaignBidModifierService.CampaignBidModifierServiceClient grpcClient = new CampaignBidModifierService.CampaignBidModifierServiceClient(callInvoker); return new CampaignBidModifierServiceClientImpl(grpcClient, settings); } /// <summary> /// Shuts down any channels automatically created by <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not /// affected. /// </summary> /// <remarks> /// After calling this method, further calls to <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down /// by another call to this method. /// </remarks> /// <returns>A task representing the asynchronous shutdown operation.</returns> public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync(); /// <summary>The underlying gRPC CampaignBidModifierService client</summary> public virtual CampaignBidModifierService.CampaignBidModifierServiceClient GrpcClient => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::CampaignBidModifier GetCampaignBidModifier(GetCampaignBidModifierRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested campaign bid modifier in full detail. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CampaignBidModifier> GetCampaignBidModifierAsync(GetCampaignBidModifierRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CampaignBidModifier> GetCampaignBidModifierAsync(GetCampaignBidModifierRequest request, st::CancellationToken cancellationToken) => GetCampaignBidModifierAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the campaign bid modifier to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::CampaignBidModifier GetCampaignBidModifier(string resourceName, gaxgrpc::CallSettings callSettings = null) => GetCampaignBidModifier(new GetCampaignBidModifierRequest { ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the campaign bid modifier to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CampaignBidModifier> GetCampaignBidModifierAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) => GetCampaignBidModifierAsync(new GetCampaignBidModifierRequest { ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the campaign bid modifier to fetch. 
/// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CampaignBidModifier> GetCampaignBidModifierAsync(string resourceName, st::CancellationToken cancellationToken) => GetCampaignBidModifierAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the campaign bid modifier to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::CampaignBidModifier GetCampaignBidModifier(gagvr::CampaignBidModifierName resourceName, gaxgrpc::CallSettings callSettings = null) => GetCampaignBidModifier(new GetCampaignBidModifierRequest { ResourceNameAsCampaignBidModifierName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the campaign bid modifier to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CampaignBidModifier> GetCampaignBidModifierAsync(gagvr::CampaignBidModifierName resourceName, gaxgrpc::CallSettings callSettings = null) => GetCampaignBidModifierAsync(new GetCampaignBidModifierRequest { ResourceNameAsCampaignBidModifierName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the campaign bid modifier to fetch. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CampaignBidModifier> GetCampaignBidModifierAsync(gagvr::CampaignBidModifierName resourceName, st::CancellationToken cancellationToken) => GetCampaignBidModifierAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Creates, updates, or removes campaign bid modifiers. /// Operation statuses are returned. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [ContextError]() /// [CriterionError]() /// [DatabaseError]() /// [DateError]() /// [DistinctError]() /// [FieldError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NewResourceCreationError]() /// [NotEmptyError]() /// [NullError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual MutateCampaignBidModifiersResponse MutateCampaignBidModifiers(MutateCampaignBidModifiersRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates, updates, or removes campaign bid modifiers. /// Operation statuses are returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [ContextError]() /// [CriterionError]() /// [DatabaseError]() /// [DateError]() /// [DistinctError]() /// [FieldError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NewResourceCreationError]() /// [NotEmptyError]() /// [NullError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCampaignBidModifiersResponse> MutateCampaignBidModifiersAsync(MutateCampaignBidModifiersRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates, updates, or removes campaign bid modifiers. /// Operation statuses are returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [ContextError]() /// [CriterionError]() /// [DatabaseError]() /// [DateError]() /// [DistinctError]() /// [FieldError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NewResourceCreationError]() /// [NotEmptyError]() /// [NullError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCampaignBidModifiersResponse> MutateCampaignBidModifiersAsync(MutateCampaignBidModifiersRequest request, st::CancellationToken cancellationToken) => MutateCampaignBidModifiersAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Creates, updates, or removes campaign bid modifiers. /// Operation statuses are returned. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [ContextError]() /// [CriterionError]() /// [DatabaseError]() /// [DateError]() /// [DistinctError]() /// [FieldError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NewResourceCreationError]() /// [NotEmptyError]() /// [NullError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="customerId"> /// Required. ID of the customer whose campaign bid modifiers are being modified. /// </param> /// <param name="operations"> /// Required. The list of operations to perform on individual campaign bid modifiers. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual MutateCampaignBidModifiersResponse MutateCampaignBidModifiers(string customerId, scg::IEnumerable<CampaignBidModifierOperation> operations, gaxgrpc::CallSettings callSettings = null) => MutateCampaignBidModifiers(new MutateCampaignBidModifiersRequest { CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), Operations = { gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)), }, }, callSettings); /// <summary> /// Creates, updates, or removes campaign bid modifiers. /// Operation statuses are returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [ContextError]() /// [CriterionError]() /// [DatabaseError]() /// [DateError]() /// [DistinctError]() /// [FieldError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NewResourceCreationError]() /// [NotEmptyError]() /// [NullError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="customerId"> /// Required. ID of the customer whose campaign bid modifiers are being modified. /// </param> /// <param name="operations"> /// Required. The list of operations to perform on individual campaign bid modifiers. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCampaignBidModifiersResponse> MutateCampaignBidModifiersAsync(string customerId, scg::IEnumerable<CampaignBidModifierOperation> operations, gaxgrpc::CallSettings callSettings = null) => MutateCampaignBidModifiersAsync(new MutateCampaignBidModifiersRequest { CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), Operations = { gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)), }, }, callSettings); /// <summary> /// Creates, updates, or removes campaign bid modifiers. /// Operation statuses are returned. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [ContextError]() /// [CriterionError]() /// [DatabaseError]() /// [DateError]() /// [DistinctError]() /// [FieldError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NewResourceCreationError]() /// [NotEmptyError]() /// [NullError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="customerId"> /// Required. ID of the customer whose campaign bid modifiers are being modified. /// </param> /// <param name="operations"> /// Required. The list of operations to perform on individual campaign bid modifiers. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCampaignBidModifiersResponse> MutateCampaignBidModifiersAsync(string customerId, scg::IEnumerable<CampaignBidModifierOperation> operations, st::CancellationToken cancellationToken) => MutateCampaignBidModifiersAsync(customerId, operations, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); } /// <summary>CampaignBidModifierService client wrapper implementation, for convenient use.</summary> /// <remarks> /// Service to manage campaign bid modifiers. /// </remarks> public sealed partial class CampaignBidModifierServiceClientImpl : CampaignBidModifierServiceClient { private readonly gaxgrpc::ApiCall<GetCampaignBidModifierRequest, gagvr::CampaignBidModifier> _callGetCampaignBidModifier; private readonly gaxgrpc::ApiCall<MutateCampaignBidModifiersRequest, MutateCampaignBidModifiersResponse> _callMutateCampaignBidModifiers; /// <summary> /// Constructs a client wrapper for the CampaignBidModifierService service, with the specified gRPC client and /// settings. /// </summary> /// <param name="grpcClient">The underlying gRPC client.</param> /// <param name="settings"> /// The base <see cref="CampaignBidModifierServiceSettings"/> used within this client. /// </param> public CampaignBidModifierServiceClientImpl(CampaignBidModifierService.CampaignBidModifierServiceClient grpcClient, CampaignBidModifierServiceSettings settings) { GrpcClient = grpcClient; CampaignBidModifierServiceSettings effectiveSettings = settings ?? 
CampaignBidModifierServiceSettings.GetDefault(); gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings); _callGetCampaignBidModifier = clientHelper.BuildApiCall<GetCampaignBidModifierRequest, gagvr::CampaignBidModifier>(grpcClient.GetCampaignBidModifierAsync, grpcClient.GetCampaignBidModifier, effectiveSettings.GetCampaignBidModifierSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName); Modify_ApiCall(ref _callGetCampaignBidModifier); Modify_GetCampaignBidModifierApiCall(ref _callGetCampaignBidModifier); _callMutateCampaignBidModifiers = clientHelper.BuildApiCall<MutateCampaignBidModifiersRequest, MutateCampaignBidModifiersResponse>(grpcClient.MutateCampaignBidModifiersAsync, grpcClient.MutateCampaignBidModifiers, effectiveSettings.MutateCampaignBidModifiersSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId); Modify_ApiCall(ref _callMutateCampaignBidModifiers); Modify_MutateCampaignBidModifiersApiCall(ref _callMutateCampaignBidModifiers); OnConstruction(grpcClient, effectiveSettings, clientHelper); } partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>; partial void Modify_GetCampaignBidModifierApiCall(ref gaxgrpc::ApiCall<GetCampaignBidModifierRequest, gagvr::CampaignBidModifier> call); partial void Modify_MutateCampaignBidModifiersApiCall(ref gaxgrpc::ApiCall<MutateCampaignBidModifiersRequest, MutateCampaignBidModifiersResponse> call); partial void OnConstruction(CampaignBidModifierService.CampaignBidModifierServiceClient grpcClient, CampaignBidModifierServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper); /// <summary>The underlying gRPC CampaignBidModifierService client</summary> public override CampaignBidModifierService.CampaignBidModifierServiceClient GrpcClient { get; } partial void Modify_GetCampaignBidModifierRequest(ref GetCampaignBidModifierRequest request, ref gaxgrpc::CallSettings settings); partial void Modify_MutateCampaignBidModifiersRequest(ref MutateCampaignBidModifiersRequest request, ref gaxgrpc::CallSettings settings); /// <summary> /// Returns the requested campaign bid modifier in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override gagvr::CampaignBidModifier GetCampaignBidModifier(GetCampaignBidModifierRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_GetCampaignBidModifierRequest(ref request, ref callSettings); return _callGetCampaignBidModifier.Sync(request, callSettings); } /// <summary> /// Returns the requested campaign bid modifier in full detail. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<gagvr::CampaignBidModifier> GetCampaignBidModifierAsync(GetCampaignBidModifierRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_GetCampaignBidModifierRequest(ref request, ref callSettings); return _callGetCampaignBidModifier.Async(request, callSettings); } /// <summary> /// Creates, updates, or removes campaign bid modifiers. /// Operation statuses are returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [ContextError]() /// [CriterionError]() /// [DatabaseError]() /// [DateError]() /// [DistinctError]() /// [FieldError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NewResourceCreationError]() /// [NotEmptyError]() /// [NullError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override MutateCampaignBidModifiersResponse MutateCampaignBidModifiers(MutateCampaignBidModifiersRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_MutateCampaignBidModifiersRequest(ref request, ref callSettings); return _callMutateCampaignBidModifiers.Sync(request, callSettings); } /// <summary> /// Creates, updates, or removes campaign bid modifiers. /// Operation statuses are returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [ContextError]() /// [CriterionError]() /// [DatabaseError]() /// [DateError]() /// [DistinctError]() /// [FieldError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NewResourceCreationError]() /// [NotEmptyError]() /// [NullError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<MutateCampaignBidModifiersResponse> MutateCampaignBidModifiersAsync(MutateCampaignBidModifiersRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_MutateCampaignBidModifiersRequest(ref request, ref callSettings); return _callMutateCampaignBidModifiers.Async(request, callSettings); } } }
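// Usage sketch (not part of the generated code above): a minimal, hypothetical example of calling the
// CampaignBidModifierService client from application code. The using directive for the service namespace
// and the resource name below are assumptions/placeholders; only members documented above
// (CreateAsync, GetCampaignBidModifier, ShutdownDefaultChannelsAsync) are exercised.
using System.Threading.Tasks;
using Google.Ads.GoogleAds.V3.Services; // assumed namespace; match the namespace declared at the top of this file

internal static class CampaignBidModifierServiceClientUsageSketch
{
    internal static async Task RunAsync()
    {
        // Create a client with the default credentials, endpoint and settings.
        CampaignBidModifierServiceClient client = await CampaignBidModifierServiceClient.CreateAsync();

        // Fetch a single campaign bid modifier; the resource name is a placeholder.
        var campaignBidModifier = client.GetCampaignBidModifier("customers/1234567890/campaignBidModifiers/111~222");

        // MutateCampaignBidModifiers(customerId, operations) follows the same pattern for writes.

        // Shut down channels created by Create()/CreateAsync() when the application is done with them.
        await CampaignBidModifierServiceClient.ShutdownDefaultChannelsAsync();
    }
}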
// Copyright 2014 The Rector & Visitors of the University of Virginia // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System.IO; using System.Linq; using System.Reflection; using System.Collections.Generic; using Sensus.Probes; using Sensus.Anonymization; using Sensus.UI.UiProperties; using Sensus.Probes.Location; using Sensus.Probes.User.Scripts; using Sensus.Anonymization.Anonymizers; using Xamarin.Forms; using Newtonsoft.Json; namespace Sensus.UI { /// <summary> /// Displays a single probe. /// </summary> public class ProbePage : ContentPage { /// <summary> /// Initializes a new instance of the <see cref="ProbePage"/> class. /// </summary> /// <param name="probe">Probe to display.</param> public ProbePage(Probe probe) { Title = "Probe"; StackLayout contentLayout = new StackLayout { Orientation = StackOrientation.Vertical, VerticalOptions = LayoutOptions.FillAndExpand }; string type = ""; if (probe is ListeningProbe) type = "Listening"; else if (probe is PollingProbe) type = "Polling"; contentLayout.Children.Add(new ContentView { Content = new Label { Text = probe.DisplayName + (type == "" ? "" : " (" + type + ")"), FontSize = 20, FontAttributes = FontAttributes.Italic, TextColor = Color.Accent, HorizontalOptions = LayoutOptions.Center }, Padding = new Thickness(0, 10, 0, 10) }); foreach (StackLayout stack in UiProperty.GetPropertyStacks(probe)) contentLayout.Children.Add(stack); #region script probes if (probe is ScriptProbe) { ScriptProbe scriptProbe = probe as ScriptProbe; Button editScriptsButton = new Button { Text = "Edit Scripts", FontSize = 20 }; contentLayout.Children.Add(editScriptsButton); editScriptsButton.Clicked += async (o, e) => { await Navigation.PushAsync(new ScriptRunnersPage(scriptProbe)); }; Button shareScriptButton = new Button { Text = "Share Definition", FontSize = 20 }; contentLayout.Children.Add(shareScriptButton); shareScriptButton.Clicked += (o, e) => { string sharePath = SensusServiceHelper.Get().GetSharePath(".json"); using (StreamWriter shareFile = new StreamWriter(sharePath)) { shareFile.WriteLine(JsonConvert.SerializeObject(probe, SensusServiceHelper.JSON_SERIALIZER_SETTINGS)); } SensusServiceHelper.Get().ShareFileAsync(sharePath, "Probe Definition", "application/json"); }; } #endregion #region proximity probe if (probe is IPointsOfInterestProximityProbe) { Button editTriggersButton = new Button { Text = "Edit Triggers", FontSize = 20, HorizontalOptions = LayoutOptions.FillAndExpand }; contentLayout.Children.Add(editTriggersButton); editTriggersButton.Clicked += async (o, e) => { await Navigation.PushAsync(new ProximityTriggersPage(probe as IPointsOfInterestProximityProbe)); }; } #endregion #region anonymization List<PropertyInfo> anonymizableProperties = probe.DatumType.GetProperties().Where(property => property.GetCustomAttribute<Anonymizable>() != null).ToList(); if (anonymizableProperties.Count > 0) { contentLayout.Children.Add(new Label { Text = "Anonymization", FontSize = 20, FontAttributes = FontAttributes.Italic, TextColor = Color.Accent, 
HorizontalOptions = LayoutOptions.Center }); List<StackLayout> anonymizablePropertyStacks = new List<StackLayout>(); foreach (PropertyInfo anonymizableProperty in anonymizableProperties) { Anonymizable anonymizableAttribute = anonymizableProperty.GetCustomAttribute<Anonymizable>(true); Label propertyLabel = new Label { Text = anonymizableAttribute.PropertyDisplayName ?? anonymizableProperty.Name + ":", FontSize = 20, HorizontalOptions = LayoutOptions.Start }; // populate a picker of anonymizers for the current property Picker anonymizerPicker = new Picker { Title = "Select Anonymizer", HorizontalOptions = LayoutOptions.FillAndExpand }; anonymizerPicker.Items.Add("Do Not Anonymize"); foreach (Anonymizer anonymizer in anonymizableAttribute.AvailableAnonymizers) anonymizerPicker.Items.Add(anonymizer.DisplayText); anonymizerPicker.SelectedIndexChanged += (o, e) => { Anonymizer selectedAnonymizer = null; if (anonymizerPicker.SelectedIndex > 0) selectedAnonymizer = anonymizableAttribute.AvailableAnonymizers[anonymizerPicker.SelectedIndex - 1]; // subtract one from the selected index since the JsonAnonymizer's collection of anonymizers start after the "None" option within the picker. probe.Protocol.JsonAnonymizer.SetAnonymizer(anonymizableProperty, selectedAnonymizer); }; // set the picker's index to the current anonymizer (or "Do Not Anonymize" if there is no current) Anonymizer currentAnonymizer = probe.Protocol.JsonAnonymizer.GetAnonymizer(anonymizableProperty); int currentIndex = 0; if (currentAnonymizer != null) currentIndex = anonymizableAttribute.AvailableAnonymizers.IndexOf(currentAnonymizer) + 1; anonymizerPicker.SelectedIndex = currentIndex; StackLayout anonymizablePropertyStack = new StackLayout { Orientation = StackOrientation.Horizontal, HorizontalOptions = LayoutOptions.FillAndExpand, Children = { propertyLabel, anonymizerPicker } }; anonymizablePropertyStacks.Add(anonymizablePropertyStack); } foreach (StackLayout anonymizablePropertyStack in anonymizablePropertyStacks.OrderBy(s => (s.Children[0] as Label).Text)) contentLayout.Children.Add(anonymizablePropertyStack); } #endregion Content = new ScrollView { Content = contentLayout }; } } }
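// Minimal sketch (not part of Sensus): the picker above reserves index 0 for "Do Not Anonymize",
// so anonymizer i in Anonymizable.AvailableAnonymizers appears at picker index i + 1. The helper below
// isolates that two-way mapping; the generic parameter T stands in for the Sensus Anonymizer type.
using System.Collections.Generic;

internal static class AnonymizerPickerIndexMapping
{
    // Returns the anonymizer chosen in the picker, or null when "Do Not Anonymize" (index 0) is selected.
    internal static T SelectedAnonymizer<T>(IReadOnlyList<T> availableAnonymizers, int selectedIndex) where T : class
    {
        return selectedIndex > 0 ? availableAnonymizers[selectedIndex - 1] : null;
    }

    // Returns the picker index to preselect for the property's current anonymizer (0 when none is set).
    internal static int IndexFor<T>(IReadOnlyList<T> availableAnonymizers, T currentAnonymizer) where T : class
    {
        if (currentAnonymizer == null)
        {
            return 0;
        }
        for (int i = 0; i < availableAnonymizers.Count; i++)
        {
            if (Equals(availableAnonymizers[i], currentAnonymizer))
            {
                return i + 1;
            }
        }
        return 0;
    }
}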
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.CodeActions; using Microsoft.CodeAnalysis.CodeRefactorings; using Microsoft.CodeAnalysis.CSharp.Extensions; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Shared.Extensions; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.CSharp.CodeRefactorings.LambdaSimplifier { // [ExportCodeRefactoringProvider(LanguageNames.CSharp, Name = PredefinedCodeRefactoringProviderNames.SimplifyLambda)] internal partial class LambdaSimplifierCodeRefactoringProvider : CodeRefactoringProvider { public override async Task ComputeRefactoringsAsync(CodeRefactoringContext context) { var document = context.Document; var textSpan = context.Span; var cancellationToken = context.CancellationToken; if (cancellationToken.IsCancellationRequested) { return; } if (document.Project.Solution.Workspace.Kind == WorkspaceKind.MiscellaneousFiles) { return; } var semanticDocument = await SemanticDocument.CreateAsync(document, cancellationToken).ConfigureAwait(false); var lambda = semanticDocument.Root.FindToken(textSpan.Start).GetAncestor(n => n is SimpleLambdaExpressionSyntax || n is ParenthesizedLambdaExpressionSyntax); if (lambda == null || !lambda.Span.IntersectsWith(textSpan.Start)) { return; } if (!CanSimplify(semanticDocument, lambda as SimpleLambdaExpressionSyntax, cancellationToken) && !CanSimplify(semanticDocument, lambda as ParenthesizedLambdaExpressionSyntax, cancellationToken)) { return; } context.RegisterRefactoring( new MyCodeAction( CSharpFeaturesResources.Simplify_lambda_expression, (c) => SimplifyLambdaAsync(document, lambda, c))); context.RegisterRefactoring( new MyCodeAction( CSharpFeaturesResources.Simplify_all_occurrences, (c) => SimplifyAllLambdasAsync(document, c))); } private async Task<Document> SimplifyLambdaAsync( Document document, SyntaxNode lambda, CancellationToken cancellationToken) { var semanticDocument = await SemanticDocument.CreateAsync(document, cancellationToken).ConfigureAwait(false); var rewriter = new Rewriter(this, semanticDocument, (n) => n == lambda, cancellationToken); var result = rewriter.Visit(semanticDocument.Root); return document.WithSyntaxRoot(result); } private async Task<Document> SimplifyAllLambdasAsync( Document document, CancellationToken cancellationToken) { var semanticDocument = await SemanticDocument.CreateAsync(document, cancellationToken).ConfigureAwait(false); var rewriter = new Rewriter(this, semanticDocument, (n) => true, cancellationToken); var result = rewriter.Visit(semanticDocument.Root); return document.WithSyntaxRoot(result); } private static bool CanSimplify( SemanticDocument document, SimpleLambdaExpressionSyntax node, CancellationToken cancellationToken) { if (node == null) { return false; } var paramName = node.Parameter.Identifier; var invocation = TryGetInvocationExpression(node.Body); return CanSimplify(document, node, new List<SyntaxToken>() { paramName }, invocation, cancellationToken); } private static bool CanSimplify( SemanticDocument document, ParenthesizedLambdaExpressionSyntax node, CancellationToken cancellationToken) { if (node == null) { return false; } var paramNames = node.ParameterList.Parameters.Select(p => p.Identifier).ToList(); var invocation = TryGetInvocationExpression(node.Body); return 
CanSimplify(document, node, paramNames, invocation, cancellationToken); } private static bool CanSimplify( SemanticDocument document, ExpressionSyntax lambda, List<SyntaxToken> paramNames, InvocationExpressionSyntax invocation, CancellationToken cancellationToken) { if (invocation == null) { return false; } if (invocation.ArgumentList.Arguments.Count != paramNames.Count) { return false; } for (var i = 0; i < paramNames.Count; i++) { var argument = invocation.ArgumentList.Arguments[i]; if (argument.NameColon != null || argument.RefOrOutKeyword.Kind() != SyntaxKind.None || !argument.Expression.IsKind(SyntaxKind.IdentifierName)) { return false; } var identifierName = (IdentifierNameSyntax)argument.Expression; if (identifierName.Identifier.ValueText != paramNames[i].ValueText) { return false; } } var semanticModel = document.SemanticModel; var lambdaSemanticInfo = semanticModel.GetSymbolInfo(lambda, cancellationToken); var invocationSemanticInfo = semanticModel.GetSymbolInfo(invocation, cancellationToken); if (lambdaSemanticInfo.Symbol == null || invocationSemanticInfo.Symbol == null) { // Don't offer this if there are any errors or ambiguities. return false; } var lambdaMethod = lambdaSemanticInfo.Symbol as IMethodSymbol; var invocationMethod = invocationSemanticInfo.Symbol as IMethodSymbol; if (lambdaMethod == null || invocationMethod == null) { return false; } // TODO(cyrusn): Handle extension methods as well. if (invocationMethod.IsExtensionMethod) { return false; } // Check if any of the parameter is of Type Dynamic foreach (var parameter in lambdaMethod.Parameters) { if (parameter.Type != null && parameter.Type.Kind == SymbolKind.DynamicType) { return false; } } // Check if the parameter and return types match between the lambda and the // invocation. Note: return types can be covariant and argument types can be // contravariant. if (lambdaMethod.ReturnsVoid != invocationMethod.ReturnsVoid || lambdaMethod.Parameters.Length != invocationMethod.Parameters.Length) { return false; } if (!lambdaMethod.ReturnsVoid) { // Return type has to be covariant. var conversion = document.SemanticModel.Compilation.ClassifyConversion( invocationMethod.ReturnType, lambdaMethod.ReturnType); if (!conversion.IsIdentityOrImplicitReference()) { return false; } } // Parameter types have to be contravariant. for (int i = 0; i < lambdaMethod.Parameters.Length; i++) { var conversion = document.SemanticModel.Compilation.ClassifyConversion( lambdaMethod.Parameters[i].Type, invocationMethod.Parameters[i].Type); if (!conversion.IsIdentityOrImplicitReference()) { return false; } } if (WouldCauseAmbiguity(lambda, invocation, semanticModel, cancellationToken)) { return false; } // Looks like something we can simplify. return true; } // Ensure that if we replace the invocation with its expression that its expression will // bind unambiguously. 
This can happen with awesome cases like: #if false static void Foo<T>(T x) where T : class { } static void Bar(Action<int> x) { } static void Bar(Action<string> x) { } static void Main() { Bar(x => Foo(x)); // error CS0121: The call is ambiguous between the following methods or properties: 'A.Bar(System.Action<int>)' and 'A.Bar(System.Action<string>)' } #endif private static bool WouldCauseAmbiguity( ExpressionSyntax lambda, InvocationExpressionSyntax invocation, SemanticModel oldSemanticModel, CancellationToken cancellationToken) { var annotation = new SyntaxAnnotation(); // In order to check if there will be a problem, we actually make the change, fork the // compilation, and then verify that the new expression bound unambiguously. var oldExpression = invocation.Expression.WithAdditionalAnnotations(annotation); var oldCompilation = oldSemanticModel.Compilation; var oldTree = oldSemanticModel.SyntaxTree; var oldRoot = oldTree.GetRoot(cancellationToken); var newRoot = oldRoot.ReplaceNode(lambda, oldExpression); var newTree = oldTree.WithRootAndOptions(newRoot, oldTree.Options); var newCompilation = oldCompilation.ReplaceSyntaxTree(oldTree, newTree); var newExpression = newTree.GetRoot(cancellationToken).GetAnnotatedNodesAndTokens(annotation).First().AsNode(); var newSemanticModel = newCompilation.GetSemanticModel(newTree); var info = newSemanticModel.GetSymbolInfo(newExpression, cancellationToken); return info.CandidateReason != CandidateReason.None; } private static InvocationExpressionSyntax TryGetInvocationExpression( SyntaxNode lambdaBody) { if (lambdaBody is ExpressionSyntax exprBody) { return exprBody.WalkDownParentheses() as InvocationExpressionSyntax; } else if (lambdaBody is BlockSyntax block) { if (block.Statements.Count == 1) { var statement = block.Statements.First(); if (statement is ReturnStatementSyntax returnStatement) { return returnStatement.Expression.WalkDownParentheses() as InvocationExpressionSyntax; } else if (statement is ExpressionStatementSyntax exprStatement) { return exprStatement.Expression.WalkDownParentheses() as InvocationExpressionSyntax; } } } return null; } private class MyCodeAction : CodeAction.DocumentChangeAction { public MyCodeAction(string title, Func<CancellationToken, Task<Document>> createChangedDocument) : base(title, createChangedDocument) { } } } }
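// Illustrative sketch (not part of the provider above): the refactoring rewrites a lambda whose body is a
// single invocation that merely forwards the lambda parameters, replacing the lambda with the invoked
// method group, provided parameter/return types are compatible and the shortened form still binds
// unambiguously. "Length" and "PrintLength" below are hypothetical names used only for this example.
using System;
using System.Collections.Generic;
using System.Linq;

internal static class LambdaSimplifierExample
{
    private static int Length(string s) => s.Length;
    private static void PrintLength(string s) => Console.WriteLine(s.Length);

    internal static void Before(IEnumerable<string> words)
    {
        // Candidates the provider can simplify: each lambda only forwards its parameter.
        var lengths = words.Select(w => Length(w));
        words.ToList().ForEach(w => PrintLength(w));
    }

    internal static void After(IEnumerable<string> words)
    {
        // The simplified form substitutes the method group for the lambda.
        var lengths = words.Select(Length);
        words.ToList().ForEach(PrintLength);
    }
}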
/* Copyright (c) 2005 Poderosa Project, All Rights Reserved. This file is a part of the Granados SSH Client Library that is subject to the license included in the distributed package. You may not use this file except in compliance with the license. $Id: SSH1Packet.cs,v 1.4 2011/10/27 23:21:56 kzmi Exp $ */ /* * structure of packet * * length(4) padding(1-8) type(1) data(0+) crc(4) * * 1. length = type+data+crc * 2. the length of padding+type+data+crc must be a multiple of 8 * 3. padding length must be 1 at least * 4. crc is calculated from padding,type and data * */ using System; using System.Collections; using System.Diagnostics; using System.Threading; using Granados.Crypto; using Granados.IO; using Granados.IO.SSH1; using Granados.Util; namespace Granados.SSH1 { internal class SSH1Packet { private byte _type; private byte[] _data; private uint _CRC; /** * constructs from the packet type and the body */ public static SSH1Packet FromPlainPayload(PacketType type, byte[] data) { SSH1Packet p = new SSH1Packet(); p._type = (byte)type; p._data = data; return p; } public static SSH1Packet FromPlainPayload(PacketType type) { SSH1Packet p = new SSH1Packet(); p._type = (byte)type; p._data = new byte[0]; return p; } /** * creates a packet as the input of shell */ static SSH1Packet AsStdinString(byte[] input) { SSH1DataWriter w = new SSH1DataWriter(); w.WriteAsString(input); SSH1Packet p = SSH1Packet.FromPlainPayload(PacketType.SSH_CMSG_STDIN_DATA, w.ToByteArray()); return p; } private byte[] BuildImage() { int packet_length = (_data == null ? 0 : _data.Length) + 5; //type and CRC int padding_length = 8 - (packet_length % 8); byte[] image = new byte[packet_length + padding_length + 4]; SSHUtil.WriteIntToByteArray(image, 0, packet_length); for (int i = 0; i < padding_length; i++) image[4 + i] = 0; //padding: filling by random values is better image[4 + padding_length] = _type; if (_data != null) Array.Copy(_data, 0, image, 4 + padding_length + 1, _data.Length); _CRC = CRC.Calc(image, 4, image.Length - 8); SSHUtil.WriteIntToByteArray(image, image.Length - 4, (int)_CRC); return image; } /** * writes to plain stream */ public void WriteTo(AbstractGranadosSocket output) { byte[] image = BuildImage(); output.Write(image, 0, image.Length); } /** * writes to encrypted stream */ public void WriteTo(AbstractGranadosSocket output, Cipher cipher) { byte[] image = BuildImage(); //dumpBA(image); byte[] encrypted = new byte[image.Length - 4]; cipher.Encrypt(image, 4, image.Length - 4, encrypted, 0); //length field must not be encrypted Array.Copy(encrypted, 0, image, 4, encrypted.Length); output.Write(image, 0, image.Length); } public PacketType Type { get { return (PacketType)_type; } } public byte[] Data { get { return _data; } } public int DataLength { get { return _data == null ? 
0 : _data.Length; } } } internal class CallbackSSH1PacketHandler : IDataHandler { internal SSH1Connection _connection; internal CallbackSSH1PacketHandler(SSH1Connection con) { _connection = con; } public void OnData(DataFragment data) { _connection.AsyncReceivePacket(data); } public void OnError(Exception error) { _connection.EventReceiver.OnError(error); } public void OnClosed() { _connection.EventReceiver.OnConnectionClosed(); } } internal class SSH1PacketBuilder : FilterDataHandler { private byte[] _buffer; private int _readOffset; private int _writeOffset; private Cipher _cipher; private bool _checkMAC; public SSH1PacketBuilder(IDataHandler handler) : base(handler) { _buffer = new byte[0x1000]; _readOffset = 0; _writeOffset = 0; _cipher = null; _checkMAC = false; } public void SetCipher(Cipher c, bool check_mac) { _cipher = c; _checkMAC = check_mac; } public override void OnData(DataFragment data) { try { while (_buffer.Length - _writeOffset < data.Length) ExpandBuffer(); Array.Copy(data.Data, data.Offset, _buffer, _writeOffset, data.Length); _writeOffset += data.Length; DataFragment p = ConstructPacket(); while (p != null) { _inner_handler.OnData(p); p = ConstructPacket(); } ReduceBuffer(); } catch (Exception ex) { _inner_handler.OnError(ex); } } //returns a DataFragment for the next complete packet, or null if more data is needed private DataFragment ConstructPacket() { if (_writeOffset - _readOffset < 4) return null; int packet_length = SSHUtil.ReadInt32(_buffer, _readOffset); int padding_length = 8 - (packet_length % 8); //padding length int total = packet_length + padding_length; if (_writeOffset - _readOffset < 4 + total) return null; byte[] decrypted = new byte[total]; if (_cipher != null) _cipher.Decrypt(_buffer, _readOffset + 4, total, decrypted, 0); else Array.Copy(_buffer, _readOffset + 4, decrypted, 0, total); _readOffset += 4 + total; return ConstructAndCheck(decrypted, packet_length, padding_length, _checkMAC); } /** * reads type, data, and crc from byte array. * an exception is thrown if crc check fails. */ private DataFragment ConstructAndCheck(byte[] buf, int packet_length, int padding_length, bool check_crc) { int body_len = packet_length - 4; byte[] body = new byte[body_len]; Array.Copy(buf, padding_length, body, 0, body_len); uint received_crc = (uint)SSHUtil.ReadInt32(buf, buf.Length - 4); if (check_crc) { uint crc = CRC.Calc(buf, 0, buf.Length - 4); if (received_crc != crc) throw new SSHException("CRC Error", buf); } return new DataFragment(body, 0, body_len); } private void ExpandBuffer() { byte[] t = new byte[_buffer.Length * 2]; Array.Copy(_buffer, 0, t, 0, _buffer.Length); _buffer = t; } private void ReduceBuffer() { if (_readOffset == _writeOffset) { _readOffset = 0; _writeOffset = 0; } else { byte[] temp = new byte[_writeOffset - _readOffset]; Array.Copy(_buffer, _readOffset, temp, 0, temp.Length); Array.Copy(temp, 0, _buffer, 0, temp.Length); _readOffset = 0; _writeOffset = temp.Length; } } } }
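// Framing sketch (not part of Granados): mirrors the layout described in the header comment and
// implemented by SSH1Packet.BuildImage()/SSH1PacketBuilder.ConstructPacket(): packet_length covers
// type+data+crc, padding brings padding+type+data+crc to a multiple of 8, and a 4-byte length field
// precedes the padded body on the wire.
internal static class Ssh1FramingMath
{
    // Returns (packetLength, paddingLength, totalWireLength) for a payload of dataLength bytes.
    internal static (int PacketLength, int PaddingLength, int TotalWireLength) ComputeLayout(int dataLength)
    {
        int packetLength = dataLength + 5;                        // 1 byte type + dataLength bytes data + 4 bytes CRC
        int paddingLength = 8 - (packetLength % 8);               // 1..8 bytes, so the padded body is a multiple of 8
        int totalWireLength = 4 + paddingLength + packetLength;   // 4-byte length field + padding + type + data + CRC
        return (packetLength, paddingLength, totalWireLength);
    }
}
// For example, a 10-byte payload gives packetLength 15, paddingLength 1 and 20 bytes on the wire.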
namespace Ocelot.AcceptanceTests { using System; using System.Collections.Generic; using Ocelot.Configuration.File; using Shouldly; using Xunit; public class CannotStartOcelotTests : IDisposable { private readonly Steps _steps; public CannotStartOcelotTests() { _steps = new Steps(); } [Fact] public void should_throw_exception_if_cannot_start_because_service_discovery_provider_specified_in_config_but_no_service_discovery_provider_registered_with_dynamic_re_routes() { var invalidConfig = new FileConfiguration { GlobalConfiguration = new FileGlobalConfiguration { ServiceDiscoveryProvider = new FileServiceDiscoveryProvider { Host = "localhost", Type = "consul", Port = 8500 } } }; Exception exception = null; _steps.GivenThereIsAConfiguration(invalidConfig); try { _steps.GivenOcelotIsRunning(); } catch (Exception ex) { exception = ex; } exception.ShouldNotBeNull(); exception.Message.ShouldBe("One or more errors occurred. (Unable to start Ocelot, errors are: Unable to start Ocelot, errors are: Unable to start Ocelot because either a ReRoute or GlobalConfiguration are using ServiceDiscoveryOptions but no ServiceDiscoveryFinderDelegate has been registered in dependency injection container. Are you missing a package like Ocelot.Provider.Consul and services.AddConsul() or Ocelot.Provider.Eureka and services.AddEureka()?)"); } [Fact] public void should_throw_exception_if_cannot_start_because_service_discovery_provider_specified_in_config_but_no_service_discovery_provider_registered() { var invalidConfig = new FileConfiguration { ReRoutes = new List<FileReRoute> { new FileReRoute { DownstreamPathTemplate = "/", DownstreamScheme = "http", UpstreamPathTemplate = "/laura", UpstreamHttpMethod = new List<string> { "Get" }, ServiceName = "test" } }, GlobalConfiguration = new FileGlobalConfiguration { ServiceDiscoveryProvider = new FileServiceDiscoveryProvider { Host = "localhost", Type = "consul", Port = 8500 } } }; Exception exception = null; _steps.GivenThereIsAConfiguration(invalidConfig); try { _steps.GivenOcelotIsRunning(); } catch (Exception ex) { exception = ex; } exception.ShouldNotBeNull(); exception.Message.ShouldBe("One or more errors occurred. (Unable to start Ocelot, errors are: Unable to start Ocelot, errors are: Unable to start Ocelot because either a ReRoute or GlobalConfiguration are using ServiceDiscoveryOptions but no ServiceDiscoveryFinderDelegate has been registered in dependency injection container. Are you missing a package like Ocelot.Provider.Consul and services.AddConsul() or Ocelot.Provider.Eureka and services.AddEureka()?,Unable to start Ocelot, errors are: Unable to start Ocelot because either a ReRoute or GlobalConfiguration are using ServiceDiscoveryOptions but no ServiceDiscoveryFinderDelegate has been registered in dependency injection container. 
Are you missing a package like Ocelot.Provider.Consul and services.AddConsul() or Ocelot.Provider.Eureka and services.AddEureka()?)"); } [Fact] public void should_throw_exception_if_cannot_start_because_no_qos_delegate_registered_globally() { var invalidConfig = new FileConfiguration { ReRoutes = new List<FileReRoute> { new FileReRoute { DownstreamPathTemplate = "/", DownstreamScheme = "http", DownstreamHostAndPorts = new List<FileHostAndPort> { new FileHostAndPort { Host = "localhost", Port = 51878, } }, UpstreamPathTemplate = "/laura", UpstreamHttpMethod = new List<string> { "Get" }, Key = "Laura", } }, GlobalConfiguration = new FileGlobalConfiguration { QoSOptions = new FileQoSOptions { TimeoutValue = 1, ExceptionsAllowedBeforeBreaking = 1 } } }; Exception exception = null; _steps.GivenThereIsAConfiguration(invalidConfig); try { _steps.GivenOcelotIsRunning(); } catch (Exception ex) { exception = ex; } exception.ShouldNotBeNull(); exception.Message.ShouldBe("One or more errors occurred. (Unable to start Ocelot, errors are: Unable to start Ocelot because either a ReRoute or GlobalConfiguration are using QoSOptions but no QosDelegatingHandlerDelegate has been registered in dependency injection container. Are you missing a package like Ocelot.Provider.Polly and services.AddPolly()?)"); } [Fact] public void should_throw_exception_if_cannot_start_because_no_qos_delegate_registered_for_re_route() { var invalidConfig = new FileConfiguration { ReRoutes = new List<FileReRoute> { new FileReRoute { DownstreamPathTemplate = "/", DownstreamScheme = "http", DownstreamHostAndPorts = new List<FileHostAndPort> { new FileHostAndPort { Host = "localhost", Port = 51878, } }, UpstreamPathTemplate = "/laura", UpstreamHttpMethod = new List<string> { "Get" }, Key = "Laura", QoSOptions = new FileQoSOptions { TimeoutValue = 1, ExceptionsAllowedBeforeBreaking = 1 } } } }; Exception exception = null; _steps.GivenThereIsAConfiguration(invalidConfig); try { _steps.GivenOcelotIsRunning(); } catch (Exception ex) { exception = ex; } exception.ShouldNotBeNull(); exception.Message.ShouldBe("One or more errors occurred. (Unable to start Ocelot, errors are: Unable to start Ocelot because either a ReRoute or GlobalConfiguration are using QoSOptions but no QosDelegatingHandlerDelegate has been registered in dependency injection container. Are you missing a package like Ocelot.Provider.Polly and services.AddPolly()?)"); } [Fact] public void should_throw_exception_if_cannot_start() { var invalidConfig = new FileConfiguration() { ReRoutes = new List<FileReRoute> { new FileReRoute { UpstreamPathTemplate = "api", DownstreamPathTemplate = "test" } } }; Exception exception = null; _steps.GivenThereIsAConfiguration(invalidConfig); try { _steps.GivenOcelotIsRunning(); } catch (Exception ex) { exception = ex; } exception.ShouldNotBeNull(); exception.Message.ShouldBe("One or more errors occurred. (Unable to start Ocelot, errors are: Downstream Path Template test doesnt start with forward slash,Upstream Path Template api doesnt start with forward slash,When not using service discovery DownstreamHostAndPorts must be set and not empty or Ocelot cannot find your service!)"); } public void Dispose() { _steps.Dispose(); } } }
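// Companion sketch (not part of the tests above): the registrations the failing configurations are
// missing. The extension methods live in the optional provider packages named in the error messages
// (Ocelot.Provider.Consul, Ocelot.Provider.Polly); method and namespace names below follow those
// messages and the package documentation, so treat this as an assumed outline rather than a verified
// Startup class for this repository.
using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;
using Ocelot.DependencyInjection;
using Ocelot.Middleware;
using Ocelot.Provider.Consul;
using Ocelot.Provider.Polly;

public class OcelotStartupSketch
{
    public void ConfigureServices(IServiceCollection services)
    {
        services.AddOcelot()
            .AddConsul()   // registers the ServiceDiscoveryFinderDelegate needed for consul-based re-routes
            .AddPolly();   // registers the QosDelegatingHandlerDelegate needed when QoSOptions are configured
    }

    public void Configure(IApplicationBuilder app)
    {
        app.UseOcelot().Wait();
    }
}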
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.IO; using System.Collections; using System.Text; using System.Diagnostics; using System.Xml.Schema; using System.Xml.XPath; using MS.Internal.Xml.XPath; using System.Globalization; namespace System.Xml { // Represents a single node in the document. [DebuggerDisplay("{debuggerDisplayProxy}")] public abstract class XmlNode : ICloneable, IEnumerable, IXPathNavigable { internal XmlNode parentNode; //this pointer is reused to save the userdata information, need to prevent internal user access the pointer directly. internal XmlNode() { } internal XmlNode(XmlDocument doc) { if (doc == null) throw new ArgumentException(SR.Xdom_Node_Null_Doc); this.parentNode = doc; } public virtual XPathNavigator CreateNavigator() { XmlDocument thisAsDoc = this as XmlDocument; if (thisAsDoc != null) { return thisAsDoc.CreateNavigator(this); } XmlDocument doc = OwnerDocument; Debug.Assert(doc != null); return doc.CreateNavigator(this); } // Selects the first node that matches the xpath expression public XmlNode SelectSingleNode(string xpath) { XmlNodeList list = SelectNodes(xpath); // SelectNodes returns null for certain node types return list != null ? list[0] : null; } // Selects the first node that matches the xpath expression and given namespace context. public XmlNode SelectSingleNode(string xpath, XmlNamespaceManager nsmgr) { XPathNavigator xn = (this).CreateNavigator(); //if the method is called on node types like DocType, Entity, XmlDeclaration, //the navigator returned is null. So just return null from here for those node types. if (xn == null) return null; XPathExpression exp = xn.Compile(xpath); exp.SetContext(nsmgr); return new XPathNodeList(xn.Select(exp))[0]; } // Selects all nodes that match the xpath expression public XmlNodeList SelectNodes(string xpath) { XPathNavigator n = (this).CreateNavigator(); //if the method is called on node types like DocType, Entity, XmlDeclaration, //the navigator returned is null. So just return null from here for those node types. if (n == null) return null; return new XPathNodeList(n.Select(xpath)); } // Selects all nodes that match the xpath expression and given namespace context. public XmlNodeList SelectNodes(string xpath, XmlNamespaceManager nsmgr) { XPathNavigator xn = (this).CreateNavigator(); //if the method is called on node types like DocType, Entity, XmlDeclaration, //the navigator returned is null. So just return null from here for those node types. if (xn == null) return null; XPathExpression exp = xn.Compile(xpath); exp.SetContext(nsmgr); return new XPathNodeList(xn.Select(exp)); } // Gets the name of the node. public abstract string Name { get; } // Gets or sets the value of the node. public virtual string Value { get { return null; } set { throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, SR.Xdom_Node_SetVal, NodeType.ToString())); } } // Gets the type of the current node. public abstract XmlNodeType NodeType { get; } // Gets the parent of this node (for nodes that can have parents). 
public virtual XmlNode ParentNode { get { Debug.Assert(parentNode != null); if (parentNode.NodeType != XmlNodeType.Document) { return parentNode; } // Linear lookup through the children of the document XmlLinkedNode firstChild = parentNode.FirstChild as XmlLinkedNode; if (firstChild != null) { XmlLinkedNode node = firstChild; do { if (node == this) { return parentNode; } node = node.next; } while (node != null && node != firstChild); } return null; } } // Gets all children of this node. public virtual XmlNodeList ChildNodes { get { return new XmlChildNodes(this); } } // Gets the node immediately preceding this node. public virtual XmlNode PreviousSibling { get { return null; } } // Gets the node immediately following this node. public virtual XmlNode NextSibling { get { return null; } } // Gets a XmlAttributeCollection containing the attributes // of this node. public virtual XmlAttributeCollection Attributes { get { return null; } } // Gets the XmlDocument that contains this node. public virtual XmlDocument OwnerDocument { get { Debug.Assert(parentNode != null); if (parentNode.NodeType == XmlNodeType.Document) return (XmlDocument)parentNode; return parentNode.OwnerDocument; } } // Gets the first child of this node. public virtual XmlNode FirstChild { get { XmlLinkedNode linkedNode = LastNode; if (linkedNode != null) return linkedNode.next; return null; } } // Gets the last child of this node. public virtual XmlNode LastChild { get { return LastNode; } } internal virtual bool IsContainer { get { return false; } } internal virtual XmlLinkedNode LastNode { get { return null; } set { } } internal bool AncestorNode(XmlNode node) { XmlNode n = this.ParentNode; while (n != null && n != this) { if (n == node) return true; n = n.ParentNode; } return false; } //trace to the top to find out its parent node. internal bool IsConnected() { XmlNode parent = ParentNode; while (parent != null && !(parent.NodeType == XmlNodeType.Document)) parent = parent.ParentNode; return parent != null; } // Inserts the specified node immediately before the specified reference node. public virtual XmlNode InsertBefore(XmlNode newChild, XmlNode refChild) { if (this == newChild || AncestorNode(newChild)) throw new ArgumentException(SR.Xdom_Node_Insert_Child); if (refChild == null) return AppendChild(newChild); if (!IsContainer) throw new InvalidOperationException(SR.Xdom_Node_Insert_Contain); if (refChild.ParentNode != this) throw new ArgumentException(SR.Xdom_Node_Insert_Path); if (newChild == refChild) return newChild; XmlDocument childDoc = newChild.OwnerDocument; XmlDocument thisDoc = OwnerDocument; if (childDoc != null && childDoc != thisDoc && childDoc != this) throw new ArgumentException(SR.Xdom_Node_Insert_Context); if (!CanInsertBefore(newChild, refChild)) throw new InvalidOperationException(SR.Xdom_Node_Insert_Location); if (newChild.ParentNode != null) newChild.ParentNode.RemoveChild(newChild); // special case for doc-fragment. if (newChild.NodeType == XmlNodeType.DocumentFragment) { XmlNode first = newChild.FirstChild; XmlNode node = first; if (node != null) { newChild.RemoveChild(node); InsertBefore(node, refChild); // insert the rest of the children after this one. 
InsertAfter(newChild, node); } return first; } if (!(newChild is XmlLinkedNode) || !IsValidChildType(newChild.NodeType)) throw new InvalidOperationException(SR.Xdom_Node_Insert_TypeConflict); XmlLinkedNode newNode = (XmlLinkedNode)newChild; XmlLinkedNode refNode = (XmlLinkedNode)refChild; string newChildValue = newChild.Value; XmlNodeChangedEventArgs args = GetEventArgs(newChild, newChild.ParentNode, this, newChildValue, newChildValue, XmlNodeChangedAction.Insert); if (args != null) BeforeEvent(args); if (refNode == FirstChild) { newNode.next = refNode; LastNode.next = newNode; newNode.SetParent(this); if (newNode.IsText) { if (refNode.IsText) { NestTextNodes(newNode, refNode); } } } else { XmlLinkedNode prevNode = (XmlLinkedNode)refNode.PreviousSibling; newNode.next = refNode; prevNode.next = newNode; newNode.SetParent(this); if (prevNode.IsText) { if (newNode.IsText) { NestTextNodes(prevNode, newNode); if (refNode.IsText) { NestTextNodes(newNode, refNode); } } else { if (refNode.IsText) { UnnestTextNodes(prevNode, refNode); } } } else { if (newNode.IsText) { if (refNode.IsText) { NestTextNodes(newNode, refNode); } } } } if (args != null) AfterEvent(args); return newNode; } // Inserts the specified node immediately after the specified reference node. public virtual XmlNode InsertAfter(XmlNode newChild, XmlNode refChild) { if (this == newChild || AncestorNode(newChild)) throw new ArgumentException(SR.Xdom_Node_Insert_Child); if (refChild == null) return PrependChild(newChild); if (!IsContainer) throw new InvalidOperationException(SR.Xdom_Node_Insert_Contain); if (refChild.ParentNode != this) throw new ArgumentException(SR.Xdom_Node_Insert_Path); if (newChild == refChild) return newChild; XmlDocument childDoc = newChild.OwnerDocument; XmlDocument thisDoc = OwnerDocument; if (childDoc != null && childDoc != thisDoc && childDoc != this) throw new ArgumentException(SR.Xdom_Node_Insert_Context); if (!CanInsertAfter(newChild, refChild)) throw new InvalidOperationException(SR.Xdom_Node_Insert_Location); if (newChild.ParentNode != null) newChild.ParentNode.RemoveChild(newChild); // special case for doc-fragment. 
if (newChild.NodeType == XmlNodeType.DocumentFragment) { XmlNode last = refChild; XmlNode first = newChild.FirstChild; XmlNode node = first; while (node != null) { XmlNode next = node.NextSibling; newChild.RemoveChild(node); InsertAfter(node, last); last = node; node = next; } return first; } if (!(newChild is XmlLinkedNode) || !IsValidChildType(newChild.NodeType)) throw new InvalidOperationException(SR.Xdom_Node_Insert_TypeConflict); XmlLinkedNode newNode = (XmlLinkedNode)newChild; XmlLinkedNode refNode = (XmlLinkedNode)refChild; string newChildValue = newChild.Value; XmlNodeChangedEventArgs args = GetEventArgs(newChild, newChild.ParentNode, this, newChildValue, newChildValue, XmlNodeChangedAction.Insert); if (args != null) BeforeEvent(args); if (refNode == LastNode) { newNode.next = refNode.next; refNode.next = newNode; LastNode = newNode; newNode.SetParent(this); if (refNode.IsText) { if (newNode.IsText) { NestTextNodes(refNode, newNode); } } } else { XmlLinkedNode nextNode = refNode.next; newNode.next = nextNode; refNode.next = newNode; newNode.SetParent(this); if (refNode.IsText) { if (newNode.IsText) { NestTextNodes(refNode, newNode); if (nextNode.IsText) { NestTextNodes(newNode, nextNode); } } else { if (nextNode.IsText) { UnnestTextNodes(refNode, nextNode); } } } else { if (newNode.IsText) { if (nextNode.IsText) { NestTextNodes(newNode, nextNode); } } } } if (args != null) AfterEvent(args); return newNode; } // Replaces the child node oldChild with newChild node. public virtual XmlNode ReplaceChild(XmlNode newChild, XmlNode oldChild) { XmlNode nextNode = oldChild.NextSibling; RemoveChild(oldChild); XmlNode node = InsertBefore(newChild, nextNode); return oldChild; } // Removes specified child node. public virtual XmlNode RemoveChild(XmlNode oldChild) { if (!IsContainer) throw new InvalidOperationException(SR.Xdom_Node_Remove_Contain); if (oldChild.ParentNode != this) throw new ArgumentException(SR.Xdom_Node_Remove_Child); XmlLinkedNode oldNode = (XmlLinkedNode)oldChild; string oldNodeValue = oldNode.Value; XmlNodeChangedEventArgs args = GetEventArgs(oldNode, this, null, oldNodeValue, oldNodeValue, XmlNodeChangedAction.Remove); if (args != null) BeforeEvent(args); XmlLinkedNode lastNode = LastNode; if (oldNode == FirstChild) { if (oldNode == lastNode) { LastNode = null; oldNode.next = null; oldNode.SetParent(null); } else { XmlLinkedNode nextNode = oldNode.next; if (nextNode.IsText) { if (oldNode.IsText) { UnnestTextNodes(oldNode, nextNode); } } lastNode.next = nextNode; oldNode.next = null; oldNode.SetParent(null); } } else { if (oldNode == lastNode) { XmlLinkedNode prevNode = (XmlLinkedNode)oldNode.PreviousSibling; prevNode.next = oldNode.next; LastNode = prevNode; oldNode.next = null; oldNode.SetParent(null); } else { XmlLinkedNode prevNode = (XmlLinkedNode)oldNode.PreviousSibling; XmlLinkedNode nextNode = oldNode.next; if (nextNode.IsText) { if (prevNode.IsText) { NestTextNodes(prevNode, nextNode); } else { if (oldNode.IsText) { UnnestTextNodes(oldNode, nextNode); } } } prevNode.next = nextNode; oldNode.next = null; oldNode.SetParent(null); } } if (args != null) AfterEvent(args); return oldChild; } // Adds the specified node to the beginning of the list of children of this node. public virtual XmlNode PrependChild(XmlNode newChild) { return InsertBefore(newChild, FirstChild); } // Adds the specified node to the end of the list of children of this node. 
        // Adds the specified node to the end of the list of children of this node.
        public virtual XmlNode AppendChild(XmlNode newChild)
        {
            XmlDocument thisDoc = OwnerDocument;
            if (thisDoc == null)
            {
                thisDoc = this as XmlDocument;
            }

            if (!IsContainer)
                throw new InvalidOperationException(SR.Xdom_Node_Insert_Contain);

            if (this == newChild || AncestorNode(newChild))
                throw new ArgumentException(SR.Xdom_Node_Insert_Child);

            if (newChild.ParentNode != null)
                newChild.ParentNode.RemoveChild(newChild);

            XmlDocument childDoc = newChild.OwnerDocument;
            if (childDoc != null && childDoc != thisDoc && childDoc != this)
                throw new ArgumentException(SR.Xdom_Node_Insert_Context);

            // special case for doc-fragment.
            if (newChild.NodeType == XmlNodeType.DocumentFragment)
            {
                XmlNode first = newChild.FirstChild;
                XmlNode node = first;
                while (node != null)
                {
                    XmlNode next = node.NextSibling;
                    newChild.RemoveChild(node);
                    AppendChild(node);
                    node = next;
                }
                return first;
            }

            if (!(newChild is XmlLinkedNode) || !IsValidChildType(newChild.NodeType))
                throw new InvalidOperationException(SR.Xdom_Node_Insert_TypeConflict);

            if (!CanInsertAfter(newChild, LastChild))
                throw new InvalidOperationException(SR.Xdom_Node_Insert_Location);

            string newChildValue = newChild.Value;
            XmlNodeChangedEventArgs args = GetEventArgs(newChild, newChild.ParentNode, this, newChildValue, newChildValue, XmlNodeChangedAction.Insert);
            if (args != null) BeforeEvent(args);

            XmlLinkedNode refNode = LastNode;
            XmlLinkedNode newNode = (XmlLinkedNode)newChild;

            if (refNode == null)
            {
                newNode.next = newNode;
                LastNode = newNode;
                newNode.SetParent(this);
            }
            else
            {
                newNode.next = refNode.next;
                refNode.next = newNode;
                LastNode = newNode;
                newNode.SetParent(this);

                if (refNode.IsText && newNode.IsText)
                {
                    NestTextNodes(refNode, newNode);
                }
            }

            if (args != null) AfterEvent(args);

            return newNode;
        }

        // The function is provided only at Load time to speed up the Load process.
        internal virtual XmlNode AppendChildForLoad(XmlNode newChild, XmlDocument doc)
        {
            XmlNodeChangedEventArgs args = doc.GetInsertEventArgsForLoad(newChild, this);
            if (args != null) doc.BeforeEvent(args);

            XmlLinkedNode refNode = LastNode;
            XmlLinkedNode newNode = (XmlLinkedNode)newChild;

            if (refNode == null)
            {
                newNode.next = newNode;
                LastNode = newNode;
                newNode.SetParentForLoad(this);
            }
            else
            {
                newNode.next = refNode.next;
                refNode.next = newNode;
                LastNode = newNode;

                if (refNode.IsText && newNode.IsText)
                {
                    NestTextNodes(refNode, newNode);
                }
                else
                {
                    newNode.SetParentForLoad(this);
                }
            }

            if (args != null) doc.AfterEvent(args);

            return newNode;
        }

        internal virtual bool IsValidChildType(XmlNodeType type) { return false; }

        internal virtual bool CanInsertBefore(XmlNode newChild, XmlNode refChild) { return true; }

        internal virtual bool CanInsertAfter(XmlNode newChild, XmlNode refChild) { return true; }

        // Gets a value indicating whether this node has any child nodes.
        public virtual bool HasChildNodes { get { return LastNode != null; } }

        // Creates a duplicate of this node.
        public abstract XmlNode CloneNode(bool deep);

        internal virtual void CopyChildren(XmlDocument doc, XmlNode container, bool deep)
        {
            for (XmlNode child = container.FirstChild; child != null; child = child.NextSibling)
            {
                AppendChildForLoad(child.CloneNode(deep), doc);
            }
        }
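        // Illustrative usage sketch (editorial addition, not part of the original source).
        // AppendChild detaches newChild from any previous parent before linking it in, and for a
        // DocumentFragment it appends the fragment's children one by one and returns the first of
        // them. The name "doc" below is hypothetical:
        //
        //     XmlDocument doc = new XmlDocument();
        //     doc.LoadXml("<root/>");
        //     XmlNode appended = doc.DocumentElement.AppendChild(doc.CreateElement("item"));
        //     // appended is the new <item /> element; doc.DocumentElement.LastChild == appended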
        // DOM Level 2
        // Puts all XmlText nodes in the full depth of the sub-tree
        // underneath this XmlNode into a "normal" form where only
        // markup (e.g., tags, comments, processing instructions, CDATA sections,
        // and entity references) separates XmlText nodes, that is, there
        // are no adjacent XmlText nodes.
        public virtual void Normalize()
        {
            XmlNode firstChildTextLikeNode = null;
            StringBuilder sb = StringBuilderCache.Acquire();
            for (XmlNode crtChild = this.FirstChild; crtChild != null;)
            {
                XmlNode nextChild = crtChild.NextSibling;
                switch (crtChild.NodeType)
                {
                    case XmlNodeType.Text:
                    case XmlNodeType.Whitespace:
                    case XmlNodeType.SignificantWhitespace:
                        {
                            sb.Append(crtChild.Value);
                            XmlNode winner = NormalizeWinner(firstChildTextLikeNode, crtChild);
                            if (winner == firstChildTextLikeNode)
                            {
                                this.RemoveChild(crtChild);
                            }
                            else
                            {
                                if (firstChildTextLikeNode != null)
                                    this.RemoveChild(firstChildTextLikeNode);
                                firstChildTextLikeNode = crtChild;
                            }
                            break;
                        }
                    case XmlNodeType.Element:
                        {
                            crtChild.Normalize();
                            goto default;
                        }
                    default:
                        {
                            if (firstChildTextLikeNode != null)
                            {
                                firstChildTextLikeNode.Value = sb.ToString();
                                firstChildTextLikeNode = null;
                            }
                            sb.Remove(0, sb.Length);
                            break;
                        }
                }
                crtChild = nextChild;
            }
            if (firstChildTextLikeNode != null && sb.Length > 0)
                firstChildTextLikeNode.Value = sb.ToString();
            StringBuilderCache.Release(sb);
        }

        private XmlNode NormalizeWinner(XmlNode firstNode, XmlNode secondNode)
        {
            // first node has the priority
            if (firstNode == null)
                return secondNode;

            Debug.Assert(firstNode.NodeType == XmlNodeType.Text
                || firstNode.NodeType == XmlNodeType.SignificantWhitespace
                || firstNode.NodeType == XmlNodeType.Whitespace
                || secondNode.NodeType == XmlNodeType.Text
                || secondNode.NodeType == XmlNodeType.SignificantWhitespace
                || secondNode.NodeType == XmlNodeType.Whitespace);

            if (firstNode.NodeType == XmlNodeType.Text)
                return firstNode;
            if (secondNode.NodeType == XmlNodeType.Text)
                return secondNode;
            if (firstNode.NodeType == XmlNodeType.SignificantWhitespace)
                return firstNode;
            if (secondNode.NodeType == XmlNodeType.SignificantWhitespace)
                return secondNode;
            if (firstNode.NodeType == XmlNodeType.Whitespace)
                return firstNode;
            if (secondNode.NodeType == XmlNodeType.Whitespace)
                return secondNode;

            Debug.Assert(false, "shouldn't have fallen through to here.");
            return null;
        }

        // Test if the DOM implementation implements a specific feature.
        public virtual bool Supports(string feature, string version)
        {
            if (string.Equals("XML", feature, StringComparison.OrdinalIgnoreCase))
            {
                if (version == null || version == "1.0" || version == "2.0")
                    return true;
            }
            return false;
        }

        // Gets the namespace URI of this node.
        public virtual string NamespaceURI { get { return string.Empty; } }

        // Gets or sets the namespace prefix of this node.
        public virtual string Prefix { get { return string.Empty; } set { } }

        // Gets the name of the node without the namespace prefix.
        public abstract string LocalName { get; }

        // Microsoft extensions

        // Gets a value indicating whether the node is read-only.
        public virtual bool IsReadOnly
        {
            get
            {
                XmlDocument doc = OwnerDocument;
                return HasReadOnlyParent(this);
            }
        }

        internal static bool HasReadOnlyParent(XmlNode n)
        {
            while (n != null)
            {
                switch (n.NodeType)
                {
                    case XmlNodeType.EntityReference:
                    case XmlNodeType.Entity:
                        return true;

                    case XmlNodeType.Attribute:
                        n = ((XmlAttribute)n).OwnerElement;
                        break;

                    default:
                        n = n.ParentNode;
                        break;
                }
            }
            return false;
        }

        // Creates a duplicate of this node.
        public virtual XmlNode Clone()
        {
            return this.CloneNode(true);
        }

        object ICloneable.Clone()
        {
            return this.CloneNode(true);
        }
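        // Illustrative usage sketch (editorial addition, not part of the original source).
        // Normalize merges runs of adjacent text-like child nodes into the "winning" node chosen by
        // NormalizeWinner above (Text beats SignificantWhitespace, which beats Whitespace). The name
        // "doc" below is hypothetical:
        //
        //     XmlDocument doc = new XmlDocument();
        //     doc.LoadXml("<root/>");
        //     doc.DocumentElement.AppendChild(doc.CreateTextNode("Hello, "));
        //     doc.DocumentElement.AppendChild(doc.CreateTextNode("world"));
        //     doc.DocumentElement.Normalize();
        //     // the root now has a single Text child with Value == "Hello, world"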
        // Provides a simple ForEach-style iteration over the
        // collection of child nodes of this XmlNode.
        IEnumerator IEnumerable.GetEnumerator()
        {
            return new XmlChildEnumerator(this);
        }

        public IEnumerator GetEnumerator()
        {
            return new XmlChildEnumerator(this);
        }

        private void AppendChildText(StringBuilder builder)
        {
            for (XmlNode child = FirstChild; child != null; child = child.NextSibling)
            {
                if (child.FirstChild == null)
                {
                    if (child.NodeType == XmlNodeType.Text || child.NodeType == XmlNodeType.CDATA
                        || child.NodeType == XmlNodeType.Whitespace || child.NodeType == XmlNodeType.SignificantWhitespace)
                        builder.Append(child.InnerText);
                }
                else
                {
                    child.AppendChildText(builder);
                }
            }
        }

        // Gets or sets the concatenated values of the node and
        // all its children.
        public virtual string InnerText
        {
            get
            {
                XmlNode fc = FirstChild;
                if (fc == null)
                {
                    return string.Empty;
                }

                if (fc.NextSibling == null)
                {
                    XmlNodeType nodeType = fc.NodeType;
                    switch (nodeType)
                    {
                        case XmlNodeType.Text:
                        case XmlNodeType.CDATA:
                        case XmlNodeType.Whitespace:
                        case XmlNodeType.SignificantWhitespace:
                            return fc.Value;
                    }
                }

                StringBuilder builder = StringBuilderCache.Acquire();
                AppendChildText(builder);
                return StringBuilderCache.GetStringAndRelease(builder);
            }

            set
            {
                XmlNode firstChild = FirstChild;
                if (firstChild != null                          // there is at least one child
                    && firstChild.NextSibling == null           // and exactly one
                    && firstChild.NodeType == XmlNodeType.Text) // which is a text node
                {
                    // this branch exists for perf reasons; the event is fired when TextNode.Value is changed
                    firstChild.Value = value;
                }
                else
                {
                    RemoveAll();
                    AppendChild(OwnerDocument.CreateTextNode(value));
                }
            }
        }

        // Gets the markup representing this node and all its children.
        public virtual string OuterXml
        {
            get
            {
                StringWriter sw = new StringWriter(CultureInfo.InvariantCulture);
                XmlDOMTextWriter xw = new XmlDOMTextWriter(sw);
                try
                {
                    WriteTo(xw);
                }
                finally
                {
                    xw.Close();
                }
                return sw.ToString();
            }
        }

        // Gets or sets the markup representing just the children of this node.
        public virtual string InnerXml
        {
            get
            {
                StringWriter sw = new StringWriter(CultureInfo.InvariantCulture);
                XmlDOMTextWriter xw = new XmlDOMTextWriter(sw);
                try
                {
                    WriteContentTo(xw);
                }
                finally
                {
                    xw.Close();
                }
                return sw.ToString();
            }

            set
            {
                throw new InvalidOperationException(SR.Xdom_Set_InnerXml);
            }
        }

        public virtual IXmlSchemaInfo SchemaInfo { get { return XmlDocument.NotKnownSchemaInfo; } }

        public virtual string BaseURI
        {
            get
            {
                // Start from the parent to save one loop iteration: if we got here, this node's
                // type cannot be Document, Entity, or EntityReference.
                XmlNode curNode = this.ParentNode;
                while (curNode != null)
                {
                    XmlNodeType nt = curNode.NodeType;
                    // An EntityReference's children come from the dtd where they are defined.
                    // We need to investigate the same thing for an entity's children if they are
                    // defined in an external dtd file.
                    if (nt == XmlNodeType.EntityReference)
                        return ((XmlEntityReference)curNode).ChildBaseURI;
                    if (nt == XmlNodeType.Document
                        || nt == XmlNodeType.Entity
                        || nt == XmlNodeType.Attribute)
                        return curNode.BaseURI;
                    curNode = curNode.ParentNode;
                }
                return string.Empty;
            }
        }

        // Saves the current node to the specified XmlWriter.
        public abstract void WriteTo(XmlWriter w);

        // Saves all the children of the node to the specified XmlWriter.
        public abstract void WriteContentTo(XmlWriter w);
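        // Illustrative usage sketch (editorial addition, not part of the original source).
        // InnerText concatenates character data only, while InnerXml and OuterXml serialize markup
        // (without and with the node's own tags, respectively). The name "doc" is hypothetical:
        //
        //     XmlDocument doc = new XmlDocument();
        //     doc.LoadXml("<p>Hello <b>world</b></p>");
        //     // doc.DocumentElement.InnerText == "Hello world"
        //     // doc.DocumentElement.InnerXml  == "Hello <b>world</b>"
        //     // doc.DocumentElement.OuterXml  == "<p>Hello <b>world</b></p>"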
        // Removes all the children and/or attributes
        // of the current node.
        public virtual void RemoveAll()
        {
            XmlNode child = FirstChild;
            XmlNode sibling = null;

            while (child != null)
            {
                sibling = child.NextSibling;
                RemoveChild(child);
                child = sibling;
            }
        }

        internal XmlDocument Document
        {
            get
            {
                if (NodeType == XmlNodeType.Document)
                    return (XmlDocument)this;
                return OwnerDocument;
            }
        }

        // Looks up the closest xmlns declaration for the given
        // prefix that is in scope for the current node and returns
        // the namespace URI in the declaration.
        public virtual string GetNamespaceOfPrefix(string prefix)
        {
            string namespaceName = GetNamespaceOfPrefixStrict(prefix);
            return namespaceName != null ? namespaceName : string.Empty;
        }

        internal string GetNamespaceOfPrefixStrict(string prefix)
        {
            XmlDocument doc = Document;
            if (doc != null)
            {
                prefix = doc.NameTable.Get(prefix);
                if (prefix == null)
                    return null;

                XmlNode node = this;
                while (node != null)
                {
                    if (node.NodeType == XmlNodeType.Element)
                    {
                        XmlElement elem = (XmlElement)node;
                        if (elem.HasAttributes)
                        {
                            XmlAttributeCollection attrs = elem.Attributes;
                            if (prefix.Length == 0)
                            {
                                for (int iAttr = 0; iAttr < attrs.Count; iAttr++)
                                {
                                    XmlAttribute attr = attrs[iAttr];
                                    if (attr.Prefix.Length == 0)
                                    {
                                        if (Ref.Equal(attr.LocalName, doc.strXmlns))
                                        {
                                            return attr.Value; // found xmlns
                                        }
                                    }
                                }
                            }
                            else
                            {
                                for (int iAttr = 0; iAttr < attrs.Count; iAttr++)
                                {
                                    XmlAttribute attr = attrs[iAttr];
                                    if (Ref.Equal(attr.Prefix, doc.strXmlns))
                                    {
                                        if (Ref.Equal(attr.LocalName, prefix))
                                        {
                                            return attr.Value; // found xmlns:prefix
                                        }
                                    }
                                    else if (Ref.Equal(attr.Prefix, prefix))
                                    {
                                        return attr.NamespaceURI; // found prefix:attr
                                    }
                                }
                            }
                        }
                        if (Ref.Equal(node.Prefix, prefix))
                        {
                            return node.NamespaceURI;
                        }
                        node = node.ParentNode;
                    }
                    else if (node.NodeType == XmlNodeType.Attribute)
                    {
                        node = ((XmlAttribute)node).OwnerElement;
                    }
                    else
                    {
                        node = node.ParentNode;
                    }
                }

                if (Ref.Equal(doc.strXml, prefix))
                {
                    // xmlns:xml
                    return doc.strReservedXml;
                }
                else if (Ref.Equal(doc.strXmlns, prefix))
                {
                    // xmlns:xmlns
                    return doc.strReservedXmlns;
                }
            }
            return null;
        }
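        // Illustrative usage sketch (editorial addition, not part of the original source).
        // Prefix-to-namespace lookup walks ancestor elements until a matching xmlns / xmlns:prefix
        // declaration is found; the reserved "xml" and "xmlns" prefixes are resolved only after the
        // ancestor walk. The name "doc" is hypothetical:
        //
        //     XmlDocument doc = new XmlDocument();
        //     doc.LoadXml("<root xmlns:x='urn:example'><child/></root>");
        //     // doc.DocumentElement.FirstChild.GetNamespaceOfPrefix("x") == "urn:example"
        //     // doc.DocumentElement.FirstChild.GetNamespaceOfPrefix("y") == "" (no declaration in scope)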
        // Looks up the closest xmlns declaration for the given namespace
        // URI that is in scope for the current node and returns
        // the prefix defined in that declaration.
        public virtual string GetPrefixOfNamespace(string namespaceURI)
        {
            string prefix = GetPrefixOfNamespaceStrict(namespaceURI);
            return prefix != null ? prefix : string.Empty;
        }

        internal string GetPrefixOfNamespaceStrict(string namespaceURI)
        {
            XmlDocument doc = Document;
            if (doc != null)
            {
                namespaceURI = doc.NameTable.Add(namespaceURI);

                XmlNode node = this;
                while (node != null)
                {
                    if (node.NodeType == XmlNodeType.Element)
                    {
                        XmlElement elem = (XmlElement)node;
                        if (elem.HasAttributes)
                        {
                            XmlAttributeCollection attrs = elem.Attributes;
                            for (int iAttr = 0; iAttr < attrs.Count; iAttr++)
                            {
                                XmlAttribute attr = attrs[iAttr];
                                if (attr.Prefix.Length == 0)
                                {
                                    if (Ref.Equal(attr.LocalName, doc.strXmlns))
                                    {
                                        if (attr.Value == namespaceURI)
                                        {
                                            return string.Empty; // found xmlns="namespaceURI"
                                        }
                                    }
                                }
                                else if (Ref.Equal(attr.Prefix, doc.strXmlns))
                                {
                                    if (attr.Value == namespaceURI)
                                    {
                                        return attr.LocalName; // found xmlns:prefix="namespaceURI"
                                    }
                                }
                                else if (Ref.Equal(attr.NamespaceURI, namespaceURI))
                                {
                                    return attr.Prefix; // found prefix:attr with prefix bound to namespaceURI
                                }
                            }
                        }
                        if (Ref.Equal(node.NamespaceURI, namespaceURI))
                        {
                            return node.Prefix;
                        }
                        node = node.ParentNode;
                    }
                    else if (node.NodeType == XmlNodeType.Attribute)
                    {
                        node = ((XmlAttribute)node).OwnerElement;
                    }
                    else
                    {
                        node = node.ParentNode;
                    }
                }

                if (Ref.Equal(doc.strReservedXml, namespaceURI))
                {
                    // xmlns:xml
                    return doc.strXml;
                }
                else if (Ref.Equal(doc.strReservedXmlns, namespaceURI))
                {
                    // xmlns:xmlns
                    return doc.strXmlns;
                }
            }
            return null;
        }

        // Retrieves the first child element with the specified name.
        public virtual XmlElement this[string name]
        {
            get
            {
                for (XmlNode n = FirstChild; n != null; n = n.NextSibling)
                {
                    if (n.NodeType == XmlNodeType.Element && n.Name == name)
                        return (XmlElement)n;
                }
                return null;
            }
        }

        // Retrieves the first child element with the specified LocalName and
        // NamespaceURI.
        public virtual XmlElement this[string localname, string ns]
        {
            get
            {
                for (XmlNode n = FirstChild; n != null; n = n.NextSibling)
                {
                    if (n.NodeType == XmlNodeType.Element && n.LocalName == localname && n.NamespaceURI == ns)
                        return (XmlElement)n;
                }
                return null;
            }
        }

        internal virtual void SetParent(XmlNode node)
        {
            if (node == null)
            {
                this.parentNode = OwnerDocument;
            }
            else
            {
                this.parentNode = node;
            }
        }

        internal virtual void SetParentForLoad(XmlNode node)
        {
            this.parentNode = node;
        }

        internal static void SplitName(string name, out string prefix, out string localName)
        {
            int colonPos = name.IndexOf(':'); // ordinal compare
            if (-1 == colonPos || 0 == colonPos || name.Length - 1 == colonPos)
            {
                prefix = string.Empty;
                localName = name;
            }
            else
            {
                prefix = name.Substring(0, colonPos);
                localName = name.Substring(colonPos + 1);
            }
        }

        internal virtual XmlNode FindChild(XmlNodeType type)
        {
            for (XmlNode child = FirstChild; child != null; child = child.NextSibling)
            {
                if (child.NodeType == type)
                {
                    return child;
                }
            }
            return null;
        }

        internal virtual XmlNodeChangedEventArgs GetEventArgs(XmlNode node, XmlNode oldParent, XmlNode newParent, string oldValue, string newValue, XmlNodeChangedAction action)
        {
            XmlDocument doc = OwnerDocument;
            if (doc != null)
            {
                if (!doc.IsLoading)
                {
                    if ((newParent != null && newParent.IsReadOnly) || (oldParent != null && oldParent.IsReadOnly))
                        throw new InvalidOperationException(SR.Xdom_Node_Modify_ReadOnly);
                }
                return doc.GetEventArgs(node, oldParent, newParent, oldValue, newValue, action);
            }
            return null;
        }

        internal virtual void BeforeEvent(XmlNodeChangedEventArgs args)
        {
            if (args != null)
                OwnerDocument.BeforeEvent(args);
        }

        internal virtual void AfterEvent(XmlNodeChangedEventArgs args)
        {
            if (args != null)
                OwnerDocument.AfterEvent(args);
        }
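        // Illustrative usage sketch (editorial addition, not part of the original source).
        // The string indexers above return the first child element matched by Name, or by
        // LocalName plus NamespaceURI, and GetPrefixOfNamespace is the reverse of
        // GetNamespaceOfPrefix. The name "doc" is hypothetical:
        //
        //     XmlDocument doc = new XmlDocument();
        //     doc.LoadXml("<root xmlns:x='urn:example'><x:item/></root>");
        //     XmlElement byName      = doc.DocumentElement["x:item"];              // by qualified Name
        //     XmlElement byLocalName = doc.DocumentElement["item", "urn:example"]; // by LocalName + namespace
        //     // doc.DocumentElement.GetPrefixOfNamespace("urn:example") == "x"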
        internal virtual XmlSpace XmlSpace
        {
            get
            {
                XmlNode node = this;
                XmlElement elem = null;
                do
                {
                    elem = node as XmlElement;
                    if (elem != null && elem.HasAttribute("xml:space"))
                    {
                        switch (XmlConvert.TrimString(elem.GetAttribute("xml:space")))
                        {
                            case "default":
                                return XmlSpace.Default;
                            case "preserve":
                                return XmlSpace.Preserve;
                            default:
                                // should we throw exception if value is otherwise?
                                break;
                        }
                    }
                    node = node.ParentNode;
                } while (node != null);
                return XmlSpace.None;
            }
        }

        internal virtual string XmlLang
        {
            get
            {
                XmlNode node = this;
                XmlElement elem = null;
                do
                {
                    elem = node as XmlElement;
                    if (elem != null)
                    {
                        if (elem.HasAttribute("xml:lang"))
                            return elem.GetAttribute("xml:lang");
                    }
                    node = node.ParentNode;
                } while (node != null);
                return string.Empty;
            }
        }

        internal virtual XPathNodeType XPNodeType { get { return (XPathNodeType)(-1); } }

        internal virtual string XPLocalName { get { return string.Empty; } }

        internal virtual string GetXPAttribute(string localName, string namespaceURI)
        {
            return string.Empty;
        }

        internal virtual bool IsText { get { return false; } }

        public virtual XmlNode PreviousText { get { return null; } }

        internal static void NestTextNodes(XmlNode prevNode, XmlNode nextNode)
        {
            Debug.Assert(prevNode.IsText);
            Debug.Assert(nextNode.IsText);
            nextNode.parentNode = prevNode;
        }

        internal static void UnnestTextNodes(XmlNode prevNode, XmlNode nextNode)
        {
            Debug.Assert(prevNode.IsText);
            Debug.Assert(nextNode.IsText);
            nextNode.parentNode = prevNode.ParentNode;
        }

        private object debuggerDisplayProxy { get { return new DebuggerDisplayXmlNodeProxy(this); } }

        [DebuggerDisplay("{ToString()}")]
        internal readonly struct DebuggerDisplayXmlNodeProxy
        {
            private readonly XmlNode _node;

            public DebuggerDisplayXmlNodeProxy(XmlNode node)
            {
                _node = node;
            }

            public override string ToString()
            {
                XmlNodeType nodeType = _node.NodeType;
                string result = nodeType.ToString();
                switch (nodeType)
                {
                    case XmlNodeType.Element:
                    case XmlNodeType.EntityReference:
                        result += ", Name=\"" + _node.Name + "\"";
                        break;
                    case XmlNodeType.Attribute:
                    case XmlNodeType.ProcessingInstruction:
                        result += ", Name=\"" + _node.Name + "\", Value=\"" + XmlConvert.EscapeValueForDebuggerDisplay(_node.Value) + "\"";
                        break;
                    case XmlNodeType.Text:
                    case XmlNodeType.CDATA:
                    case XmlNodeType.Comment:
                    case XmlNodeType.Whitespace:
                    case XmlNodeType.SignificantWhitespace:
                    case XmlNodeType.XmlDeclaration:
                        result += ", Value=\"" + XmlConvert.EscapeValueForDebuggerDisplay(_node.Value) + "\"";
                        break;
                    case XmlNodeType.DocumentType:
                        XmlDocumentType documentType = (XmlDocumentType)_node;
                        result += ", Name=\"" + documentType.Name + "\", SYSTEM=\"" + documentType.SystemId + "\", PUBLIC=\"" + documentType.PublicId + "\", Value=\"" + XmlConvert.EscapeValueForDebuggerDisplay(documentType.InternalSubset) + "\"";
                        break;
                    default:
                        break;
                }
                return result;
            }
        }
    }
}
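// Illustrative note (editorial addition, not part of the original source): the internal XmlSpace and
// XmlLang helpers above walk ancestor elements looking for xml:space / xml:lang attributes, which is
// the same resolution a reader would observe. A commented sketch with a hypothetical "doc":
//
//     XmlDocument doc = new XmlDocument();
//     doc.LoadXml("<root xml:space='preserve' xml:lang='en'><child>  text  </child></root>");
//     // For the <child> element, the nearest xml:space in scope is "preserve" and xml:lang is "en";
//     // these members are internal and surface indirectly (for example, through the XPath navigator
//     // over an XmlDocument), while the attributes themselves remain visible via the DOM as usual.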