// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gax = Google.Api.Gax; using gaxgrpc = Google.Api.Gax.Grpc; using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore; using proto = Google.Protobuf; using grpccore = Grpc.Core; using grpcinter = Grpc.Core.Interceptors; using sys = System; using scg = System.Collections.Generic; using sco = System.Collections.ObjectModel; using st = System.Threading; using stt = System.Threading.Tasks; namespace Google.Cloud.WebRisk.V1Beta1 { /// <summary>Settings for <see cref="WebRiskServiceV1Beta1Client"/> instances.</summary> public sealed partial class WebRiskServiceV1Beta1Settings : gaxgrpc::ServiceSettingsBase { /// <summary>Get a new instance of the default <see cref="WebRiskServiceV1Beta1Settings"/>.</summary> /// <returns>A new instance of the default <see cref="WebRiskServiceV1Beta1Settings"/>.</returns> public static WebRiskServiceV1Beta1Settings GetDefault() => new WebRiskServiceV1Beta1Settings(); /// <summary> /// Constructs a new <see cref="WebRiskServiceV1Beta1Settings"/> object with default settings. /// </summary> public WebRiskServiceV1Beta1Settings() { } private WebRiskServiceV1Beta1Settings(WebRiskServiceV1Beta1Settings existing) : base(existing) { gax::GaxPreconditions.CheckNotNull(existing, nameof(existing)); ComputeThreatListDiffSettings = existing.ComputeThreatListDiffSettings; SearchUrisSettings = existing.SearchUrisSettings; SearchHashesSettings = existing.SearchHashesSettings; OnCopy(existing); } partial void OnCopy(WebRiskServiceV1Beta1Settings existing); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>WebRiskServiceV1Beta1Client.ComputeThreatListDiff</c> and /// <c>WebRiskServiceV1Beta1Client.ComputeThreatListDiffAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 100 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.DeadlineExceeded"/>, /// <see cref="grpccore::StatusCode.Unavailable"/>. 
/// </description> /// </item> /// <item><description>Timeout: 600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings ComputeThreatListDiffSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(100), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.DeadlineExceeded, grpccore::StatusCode.Unavailable))); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>WebRiskServiceV1Beta1Client.SearchUris</c> and <c>WebRiskServiceV1Beta1Client.SearchUrisAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 100 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.DeadlineExceeded"/>, /// <see cref="grpccore::StatusCode.Unavailable"/>. /// </description> /// </item> /// <item><description>Timeout: 600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings SearchUrisSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(100), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.DeadlineExceeded, grpccore::StatusCode.Unavailable))); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>WebRiskServiceV1Beta1Client.SearchHashes</c> and <c>WebRiskServiceV1Beta1Client.SearchHashesAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 100 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.DeadlineExceeded"/>, /// <see cref="grpccore::StatusCode.Unavailable"/>. 
/// </description> /// </item> /// <item><description>Timeout: 600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings SearchHashesSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(100), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.DeadlineExceeded, grpccore::StatusCode.Unavailable))); /// <summary>Creates a deep clone of this object, with all the same property values.</summary> /// <returns>A deep clone of this <see cref="WebRiskServiceV1Beta1Settings"/> object.</returns> public WebRiskServiceV1Beta1Settings Clone() => new WebRiskServiceV1Beta1Settings(this); } /// <summary> /// Builder class for <see cref="WebRiskServiceV1Beta1Client"/> to provide simple configuration of credentials, /// endpoint etc. /// </summary> public sealed partial class WebRiskServiceV1Beta1ClientBuilder : gaxgrpc::ClientBuilderBase<WebRiskServiceV1Beta1Client> { /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary> public WebRiskServiceV1Beta1Settings Settings { get; set; } /// <summary>Creates a new builder with default settings.</summary> public WebRiskServiceV1Beta1ClientBuilder() { UseJwtAccessWithScopes = WebRiskServiceV1Beta1Client.UseJwtAccessWithScopes; } partial void InterceptBuild(ref WebRiskServiceV1Beta1Client client); partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<WebRiskServiceV1Beta1Client> task); /// <summary>Builds the resulting client.</summary> public override WebRiskServiceV1Beta1Client Build() { WebRiskServiceV1Beta1Client client = null; InterceptBuild(ref client); return client ?? BuildImpl(); } /// <summary>Builds the resulting client asynchronously.</summary> public override stt::Task<WebRiskServiceV1Beta1Client> BuildAsync(st::CancellationToken cancellationToken = default) { stt::Task<WebRiskServiceV1Beta1Client> task = null; InterceptBuildAsync(cancellationToken, ref task); return task ?? BuildAsyncImpl(cancellationToken); } private WebRiskServiceV1Beta1Client BuildImpl() { Validate(); grpccore::CallInvoker callInvoker = CreateCallInvoker(); return WebRiskServiceV1Beta1Client.Create(callInvoker, Settings); } private async stt::Task<WebRiskServiceV1Beta1Client> BuildAsyncImpl(st::CancellationToken cancellationToken) { Validate(); grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false); return WebRiskServiceV1Beta1Client.Create(callInvoker, Settings); } /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary> protected override string GetDefaultEndpoint() => WebRiskServiceV1Beta1Client.DefaultEndpoint; /// <summary> /// Returns the default scopes for this builder type, used if no scopes are otherwise specified. 
/// </summary> protected override scg::IReadOnlyList<string> GetDefaultScopes() => WebRiskServiceV1Beta1Client.DefaultScopes; /// <summary>Returns the channel pool to use when no other options are specified.</summary> protected override gaxgrpc::ChannelPool GetChannelPool() => WebRiskServiceV1Beta1Client.ChannelPool; /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary> protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance; } /// <summary>WebRiskServiceV1Beta1 client wrapper, for convenient use.</summary> /// <remarks> /// Web Risk v1beta1 API defines an interface to detect malicious URLs on your /// website and in client applications. /// </remarks> public abstract partial class WebRiskServiceV1Beta1Client { /// <summary> /// The default endpoint for the WebRiskServiceV1Beta1 service, which is a host of "webrisk.googleapis.com" and /// a port of 443. /// </summary> public static string DefaultEndpoint { get; } = "webrisk.googleapis.com:443"; /// <summary>The default WebRiskServiceV1Beta1 scopes.</summary> /// <remarks> /// The default WebRiskServiceV1Beta1 scopes are: /// <list type="bullet"> /// <item><description>https://www.googleapis.com/auth/cloud-platform</description></item> /// </list> /// </remarks> public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] { "https://www.googleapis.com/auth/cloud-platform", }); internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes); internal static bool UseJwtAccessWithScopes { get { bool useJwtAccessWithScopes = true; MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes); return useJwtAccessWithScopes; } } static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes); /// <summary> /// Asynchronously creates a <see cref="WebRiskServiceV1Beta1Client"/> using the default credentials, endpoint /// and settings. To specify custom credentials or other settings, use /// <see cref="WebRiskServiceV1Beta1ClientBuilder"/>. /// </summary> /// <param name="cancellationToken"> /// The <see cref="st::CancellationToken"/> to use while creating the client. /// </param> /// <returns>The task representing the created <see cref="WebRiskServiceV1Beta1Client"/>.</returns> public static stt::Task<WebRiskServiceV1Beta1Client> CreateAsync(st::CancellationToken cancellationToken = default) => new WebRiskServiceV1Beta1ClientBuilder().BuildAsync(cancellationToken); /// <summary> /// Synchronously creates a <see cref="WebRiskServiceV1Beta1Client"/> using the default credentials, endpoint /// and settings. To specify custom credentials or other settings, use /// <see cref="WebRiskServiceV1Beta1ClientBuilder"/>. /// </summary> /// <returns>The created <see cref="WebRiskServiceV1Beta1Client"/>.</returns> public static WebRiskServiceV1Beta1Client Create() => new WebRiskServiceV1Beta1ClientBuilder().Build(); /// <summary> /// Creates a <see cref="WebRiskServiceV1Beta1Client"/> which uses the specified call invoker for remote /// operations. /// </summary> /// <param name="callInvoker"> /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null. 
/// </param> /// <param name="settings">Optional <see cref="WebRiskServiceV1Beta1Settings"/>.</param> /// <returns>The created <see cref="WebRiskServiceV1Beta1Client"/>.</returns> internal static WebRiskServiceV1Beta1Client Create(grpccore::CallInvoker callInvoker, WebRiskServiceV1Beta1Settings settings = null) { gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker)); grpcinter::Interceptor interceptor = settings?.Interceptor; if (interceptor != null) { callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor); } WebRiskServiceV1Beta1.WebRiskServiceV1Beta1Client grpcClient = new WebRiskServiceV1Beta1.WebRiskServiceV1Beta1Client(callInvoker); return new WebRiskServiceV1Beta1ClientImpl(grpcClient, settings); } /// <summary> /// Shuts down any channels automatically created by <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not /// affected. /// </summary> /// <remarks> /// After calling this method, further calls to <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down /// by another call to this method. /// </remarks> /// <returns>A task representing the asynchronous shutdown operation.</returns> public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync(); /// <summary>The underlying gRPC WebRiskServiceV1Beta1 client</summary> public virtual WebRiskServiceV1Beta1.WebRiskServiceV1Beta1Client GrpcClient => throw new sys::NotImplementedException(); /// <summary> /// Gets the most recent threat list diffs. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual ComputeThreatListDiffResponse ComputeThreatListDiff(ComputeThreatListDiffRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Gets the most recent threat list diffs. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<ComputeThreatListDiffResponse> ComputeThreatListDiffAsync(ComputeThreatListDiffRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Gets the most recent threat list diffs. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<ComputeThreatListDiffResponse> ComputeThreatListDiffAsync(ComputeThreatListDiffRequest request, st::CancellationToken cancellationToken) => ComputeThreatListDiffAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Gets the most recent threat list diffs. /// </summary> /// <param name="threatType"> /// The ThreatList to update. 
/// </param> /// <param name="versionToken"> /// The current version token of the client for the requested list (the /// client version that was received from the last successful diff). /// </param> /// <param name="constraints"> /// Required. The constraints associated with this request. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual ComputeThreatListDiffResponse ComputeThreatListDiff(ThreatType threatType, proto::ByteString versionToken, ComputeThreatListDiffRequest.Types.Constraints constraints, gaxgrpc::CallSettings callSettings = null) => ComputeThreatListDiff(new ComputeThreatListDiffRequest { ThreatType = threatType, VersionToken = versionToken ?? proto::ByteString.Empty, Constraints = gax::GaxPreconditions.CheckNotNull(constraints, nameof(constraints)), }, callSettings); /// <summary> /// Gets the most recent threat list diffs. /// </summary> /// <param name="threatType"> /// The ThreatList to update. /// </param> /// <param name="versionToken"> /// The current version token of the client for the requested list (the /// client version that was received from the last successful diff). /// </param> /// <param name="constraints"> /// Required. The constraints associated with this request. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<ComputeThreatListDiffResponse> ComputeThreatListDiffAsync(ThreatType threatType, proto::ByteString versionToken, ComputeThreatListDiffRequest.Types.Constraints constraints, gaxgrpc::CallSettings callSettings = null) => ComputeThreatListDiffAsync(new ComputeThreatListDiffRequest { ThreatType = threatType, VersionToken = versionToken ?? proto::ByteString.Empty, Constraints = gax::GaxPreconditions.CheckNotNull(constraints, nameof(constraints)), }, callSettings); /// <summary> /// Gets the most recent threat list diffs. /// </summary> /// <param name="threatType"> /// The ThreatList to update. /// </param> /// <param name="versionToken"> /// The current version token of the client for the requested list (the /// client version that was received from the last successful diff). /// </param> /// <param name="constraints"> /// Required. The constraints associated with this request. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<ComputeThreatListDiffResponse> ComputeThreatListDiffAsync(ThreatType threatType, proto::ByteString versionToken, ComputeThreatListDiffRequest.Types.Constraints constraints, st::CancellationToken cancellationToken) => ComputeThreatListDiffAsync(threatType, versionToken, constraints, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// This method is used to check whether a URI is on a given threatList. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual SearchUrisResponse SearchUris(SearchUrisRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// This method is used to check whether a URI is on a given threatList. 
/// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<SearchUrisResponse> SearchUrisAsync(SearchUrisRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// This method is used to check whether a URI is on a given threatList. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<SearchUrisResponse> SearchUrisAsync(SearchUrisRequest request, st::CancellationToken cancellationToken) => SearchUrisAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// This method is used to check whether a URI is on a given threatList. /// </summary> /// <param name="uri"> /// Required. The URI to be checked for matches. /// </param> /// <param name="threatTypes"> /// Required. The ThreatLists to search in. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual SearchUrisResponse SearchUris(string uri, scg::IEnumerable<ThreatType> threatTypes, gaxgrpc::CallSettings callSettings = null) => SearchUris(new SearchUrisRequest { Uri = gax::GaxPreconditions.CheckNotNullOrEmpty(uri, nameof(uri)), ThreatTypes = { gax::GaxPreconditions.CheckNotNull(threatTypes, nameof(threatTypes)), }, }, callSettings); /// <summary> /// This method is used to check whether a URI is on a given threatList. /// </summary> /// <param name="uri"> /// Required. The URI to be checked for matches. /// </param> /// <param name="threatTypes"> /// Required. The ThreatLists to search in. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<SearchUrisResponse> SearchUrisAsync(string uri, scg::IEnumerable<ThreatType> threatTypes, gaxgrpc::CallSettings callSettings = null) => SearchUrisAsync(new SearchUrisRequest { Uri = gax::GaxPreconditions.CheckNotNullOrEmpty(uri, nameof(uri)), ThreatTypes = { gax::GaxPreconditions.CheckNotNull(threatTypes, nameof(threatTypes)), }, }, callSettings); /// <summary> /// This method is used to check whether a URI is on a given threatList. /// </summary> /// <param name="uri"> /// Required. The URI to be checked for matches. /// </param> /// <param name="threatTypes"> /// Required. The ThreatLists to search in. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<SearchUrisResponse> SearchUrisAsync(string uri, scg::IEnumerable<ThreatType> threatTypes, st::CancellationToken cancellationToken) => SearchUrisAsync(uri, threatTypes, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Gets the full hashes that match the requested hash prefix. /// This is used after a hash prefix is looked up in a threatList /// and there is a match. 
The client side threatList only holds partial hashes /// so the client must query this method to determine if there is a full /// hash match of a threat. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual SearchHashesResponse SearchHashes(SearchHashesRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Gets the full hashes that match the requested hash prefix. /// This is used after a hash prefix is looked up in a threatList /// and there is a match. The client side threatList only holds partial hashes /// so the client must query this method to determine if there is a full /// hash match of a threat. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<SearchHashesResponse> SearchHashesAsync(SearchHashesRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Gets the full hashes that match the requested hash prefix. /// This is used after a hash prefix is looked up in a threatList /// and there is a match. The client side threatList only holds partial hashes /// so the client must query this method to determine if there is a full /// hash match of a threat. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<SearchHashesResponse> SearchHashesAsync(SearchHashesRequest request, st::CancellationToken cancellationToken) => SearchHashesAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Gets the full hashes that match the requested hash prefix. /// This is used after a hash prefix is looked up in a threatList /// and there is a match. The client side threatList only holds partial hashes /// so the client must query this method to determine if there is a full /// hash match of a threat. /// </summary> /// <param name="hashPrefix"> /// A hash prefix, consisting of the most significant 4-32 bytes of a SHA256 /// hash. For JSON requests, this field is base64-encoded. /// </param> /// <param name="threatTypes"> /// Required. The ThreatLists to search in. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual SearchHashesResponse SearchHashes(proto::ByteString hashPrefix, scg::IEnumerable<ThreatType> threatTypes, gaxgrpc::CallSettings callSettings = null) => SearchHashes(new SearchHashesRequest { HashPrefix = hashPrefix ?? proto::ByteString.Empty, ThreatTypes = { gax::GaxPreconditions.CheckNotNull(threatTypes, nameof(threatTypes)), }, }, callSettings); /// <summary> /// Gets the full hashes that match the requested hash prefix. /// This is used after a hash prefix is looked up in a threatList /// and there is a match. 
The client side threatList only holds partial hashes /// so the client must query this method to determine if there is a full /// hash match of a threat. /// </summary> /// <param name="hashPrefix"> /// A hash prefix, consisting of the most significant 4-32 bytes of a SHA256 /// hash. For JSON requests, this field is base64-encoded. /// </param> /// <param name="threatTypes"> /// Required. The ThreatLists to search in. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<SearchHashesResponse> SearchHashesAsync(proto::ByteString hashPrefix, scg::IEnumerable<ThreatType> threatTypes, gaxgrpc::CallSettings callSettings = null) => SearchHashesAsync(new SearchHashesRequest { HashPrefix = hashPrefix ?? proto::ByteString.Empty, ThreatTypes = { gax::GaxPreconditions.CheckNotNull(threatTypes, nameof(threatTypes)), }, }, callSettings); /// <summary> /// Gets the full hashes that match the requested hash prefix. /// This is used after a hash prefix is looked up in a threatList /// and there is a match. The client side threatList only holds partial hashes /// so the client must query this method to determine if there is a full /// hash match of a threat. /// </summary> /// <param name="hashPrefix"> /// A hash prefix, consisting of the most significant 4-32 bytes of a SHA256 /// hash. For JSON requests, this field is base64-encoded. /// </param> /// <param name="threatTypes"> /// Required. The ThreatLists to search in. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<SearchHashesResponse> SearchHashesAsync(proto::ByteString hashPrefix, scg::IEnumerable<ThreatType> threatTypes, st::CancellationToken cancellationToken) => SearchHashesAsync(hashPrefix, threatTypes, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); } /// <summary>WebRiskServiceV1Beta1 client wrapper implementation, for convenient use.</summary> /// <remarks> /// Web Risk v1beta1 API defines an interface to detect malicious URLs on your /// website and in client applications. /// </remarks> public sealed partial class WebRiskServiceV1Beta1ClientImpl : WebRiskServiceV1Beta1Client { private readonly gaxgrpc::ApiCall<ComputeThreatListDiffRequest, ComputeThreatListDiffResponse> _callComputeThreatListDiff; private readonly gaxgrpc::ApiCall<SearchUrisRequest, SearchUrisResponse> _callSearchUris; private readonly gaxgrpc::ApiCall<SearchHashesRequest, SearchHashesResponse> _callSearchHashes; /// <summary> /// Constructs a client wrapper for the WebRiskServiceV1Beta1 service, with the specified gRPC client and /// settings. /// </summary> /// <param name="grpcClient">The underlying gRPC client.</param> /// <param name="settings">The base <see cref="WebRiskServiceV1Beta1Settings"/> used within this client.</param> public WebRiskServiceV1Beta1ClientImpl(WebRiskServiceV1Beta1.WebRiskServiceV1Beta1Client grpcClient, WebRiskServiceV1Beta1Settings settings) { GrpcClient = grpcClient; WebRiskServiceV1Beta1Settings effectiveSettings = settings ?? 
WebRiskServiceV1Beta1Settings.GetDefault(); gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings); _callComputeThreatListDiff = clientHelper.BuildApiCall<ComputeThreatListDiffRequest, ComputeThreatListDiffResponse>(grpcClient.ComputeThreatListDiffAsync, grpcClient.ComputeThreatListDiff, effectiveSettings.ComputeThreatListDiffSettings); Modify_ApiCall(ref _callComputeThreatListDiff); Modify_ComputeThreatListDiffApiCall(ref _callComputeThreatListDiff); _callSearchUris = clientHelper.BuildApiCall<SearchUrisRequest, SearchUrisResponse>(grpcClient.SearchUrisAsync, grpcClient.SearchUris, effectiveSettings.SearchUrisSettings); Modify_ApiCall(ref _callSearchUris); Modify_SearchUrisApiCall(ref _callSearchUris); _callSearchHashes = clientHelper.BuildApiCall<SearchHashesRequest, SearchHashesResponse>(grpcClient.SearchHashesAsync, grpcClient.SearchHashes, effectiveSettings.SearchHashesSettings); Modify_ApiCall(ref _callSearchHashes); Modify_SearchHashesApiCall(ref _callSearchHashes); OnConstruction(grpcClient, effectiveSettings, clientHelper); } partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>; partial void Modify_ComputeThreatListDiffApiCall(ref gaxgrpc::ApiCall<ComputeThreatListDiffRequest, ComputeThreatListDiffResponse> call); partial void Modify_SearchUrisApiCall(ref gaxgrpc::ApiCall<SearchUrisRequest, SearchUrisResponse> call); partial void Modify_SearchHashesApiCall(ref gaxgrpc::ApiCall<SearchHashesRequest, SearchHashesResponse> call); partial void OnConstruction(WebRiskServiceV1Beta1.WebRiskServiceV1Beta1Client grpcClient, WebRiskServiceV1Beta1Settings effectiveSettings, gaxgrpc::ClientHelper clientHelper); /// <summary>The underlying gRPC WebRiskServiceV1Beta1 client</summary> public override WebRiskServiceV1Beta1.WebRiskServiceV1Beta1Client GrpcClient { get; } partial void Modify_ComputeThreatListDiffRequest(ref ComputeThreatListDiffRequest request, ref gaxgrpc::CallSettings settings); partial void Modify_SearchUrisRequest(ref SearchUrisRequest request, ref gaxgrpc::CallSettings settings); partial void Modify_SearchHashesRequest(ref SearchHashesRequest request, ref gaxgrpc::CallSettings settings); /// <summary> /// Gets the most recent threat list diffs. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override ComputeThreatListDiffResponse ComputeThreatListDiff(ComputeThreatListDiffRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_ComputeThreatListDiffRequest(ref request, ref callSettings); return _callComputeThreatListDiff.Sync(request, callSettings); } /// <summary> /// Gets the most recent threat list diffs. 
/// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<ComputeThreatListDiffResponse> ComputeThreatListDiffAsync(ComputeThreatListDiffRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_ComputeThreatListDiffRequest(ref request, ref callSettings); return _callComputeThreatListDiff.Async(request, callSettings); } /// <summary> /// This method is used to check whether a URI is on a given threatList. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override SearchUrisResponse SearchUris(SearchUrisRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_SearchUrisRequest(ref request, ref callSettings); return _callSearchUris.Sync(request, callSettings); } /// <summary> /// This method is used to check whether a URI is on a given threatList. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<SearchUrisResponse> SearchUrisAsync(SearchUrisRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_SearchUrisRequest(ref request, ref callSettings); return _callSearchUris.Async(request, callSettings); } /// <summary> /// Gets the full hashes that match the requested hash prefix. /// This is used after a hash prefix is looked up in a threatList /// and there is a match. The client side threatList only holds partial hashes /// so the client must query this method to determine if there is a full /// hash match of a threat. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override SearchHashesResponse SearchHashes(SearchHashesRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_SearchHashesRequest(ref request, ref callSettings); return _callSearchHashes.Sync(request, callSettings); } /// <summary> /// Gets the full hashes that match the requested hash prefix. /// This is used after a hash prefix is looked up in a threatList /// and there is a match. The client side threatList only holds partial hashes /// so the client must query this method to determine if there is a full /// hash match of a threat. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<SearchHashesResponse> SearchHashesAsync(SearchHashesRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_SearchHashesRequest(ref request, ref callSettings); return _callSearchHashes.Async(request, callSettings); } } }
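// A minimal usage sketch (separate from the generated client above, which is marked "DO NOT EDIT")
// showing how the types declared in this file are typically combined: building a client with the
// defaults, overriding the documented 600-second timeout for a single call, and invoking the
// SearchUris overload that takes a URI plus threat lists. The URI is a placeholder, and
// ThreatType.Malware / ThreatType.SocialEngineering as well as the response.Threat null-check are
// assumptions about the v1beta1 surface; adjust them if the actual package differs.
using System;
using Google.Api.Gax;
using Google.Api.Gax.Grpc;
using Google.Cloud.WebRisk.V1Beta1;

internal static class WebRiskV1Beta1UsageSketch
{
    internal static void CheckUri()
    {
        // Default credentials, endpoint and settings (see WebRiskServiceV1Beta1Client.Create above).
        WebRiskServiceV1Beta1Client client = WebRiskServiceV1Beta1Client.Create();

        // Per-call override: cap this RPC at 30 seconds instead of the default 600-second timeout.
        CallSettings perCallSettings = CallSettings.FromExpiration(Expiration.FromTimeout(TimeSpan.FromSeconds(30)));

        SearchUrisResponse response = client.SearchUris(
            "http://example.com/some-path",                              // placeholder URI
            new[] { ThreatType.Malware, ThreatType.SocialEngineering },  // assumed enum values
            perCallSettings);

        // Assumes the response carries a null Threat when no list matched.
        Console.WriteLine(response.Threat == null ? "No threat list match." : "URI found on a threat list.");
    }
}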
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Text; using System.Windows.Forms; namespace Factotum { public partial class MaterialTypeView : Form { ESiteCollection sites; // ---------------------------------------------------------------------- // Initialization // ---------------------------------------------------------------------- // Form constructor public MaterialTypeView() { InitializeComponent(); // Take care of settings that are not easily managed in the designer. InitializeControls(); } // Take care of settings that are not easily managed in the designer. private void InitializeControls() { sites = ESite.ListByName(true, false, false); cboSiteFilter.DataSource = sites; cboSiteFilter.DisplayMember = "SiteName"; cboSiteFilter.ValueMember = "ID"; HandleEnablingForPropertySettings(); } // Set the status filter to show active types by default // and update the status combo box private void MaterialTypeView_Load(object sender, EventArgs e) { if (sites.Count == 0) { MessageBox.Show("Can't add Component Materials until there is at least one Site", "Factotum"); btnAdd.Enabled = false; btnEdit.Enabled = false; btnDelete.Enabled = false; return; } // Set the status combo first. The selector DataGridView depends on it. cboStatusFilter.SelectedIndex = (int)FilterActiveStatus.ShowActive; // Apply the current filters and set the selector row. // Passing a null selects the first row if there are any rows. UpdateSelector(null); // Now that we have some rows and columns, we can do some customization. CustomizeGrid(); // Need to do this because the customization clears the row selection. SelectGridRow(null); this.cboSiteFilter.SelectedIndexChanged += new System.EventHandler(this.cboStatus_SelectedIndexChanged); // Wire up the handler for the Entity changed event EComponentMaterial.Changed += new EventHandler<EntityChangedEventArgs>(EComponentMaterial_Changed); Globals.CurrentOutageChanged += new EventHandler(Globals_CurrentOutageChanged); } private void MaterialTypeView_FormClosed(object sender, FormClosedEventArgs e) { EComponentMaterial.Changed -= new EventHandler<EntityChangedEventArgs>(EComponentMaterial_Changed); Globals.CurrentOutageChanged -= new EventHandler(Globals_CurrentOutageChanged); } // ---------------------------------------------------------------------- // Event Handlers // ---------------------------------------------------------------------- // If any of this type of entity object was saved or deleted, we want to update the selector // The event args contain the ID of the entity that was added, modified or deleted. void EComponentMaterial_Changed(object sender, EntityChangedEventArgs e) { UpdateSelector(e.ID); } void Globals_CurrentOutageChanged(object sender, EventArgs e) { HandleEnablingForPropertySettings(); } // Handle the user's decision to edit the current material type private void EditCurrentSelection() { // Make sure there's a row selected if (dgvMaterialTypeList.SelectedRows.Count != 1) return; Guid? currentEditItem = (Guid?)(dgvMaterialTypeList.SelectedRows[0].Cells["ID"].Value); // First check to see if an instance of the form set to the selected ID already exists if (!Globals.CanActivateForm(this, "MaterialTypeEdit", currentEditItem)) { // Open the edit form with the currently selected ID. 
MaterialTypeEdit frm = new MaterialTypeEdit(currentEditItem); frm.MdiParent = this.MdiParent; frm.Show(); } } // This handles the datagridview double-click as well as button click void btnEdit_Click(object sender, System.EventArgs e) { EditCurrentSelection(); } private void dgvMaterialTypeList_KeyDown(object sender, KeyEventArgs e) { if (e.KeyCode == Keys.Enter) EditCurrentSelection(); } // Handle the user's decision to add a new material type private void btnAdd_Click(object sender, EventArgs e) { MaterialTypeEdit frm = new MaterialTypeEdit(null, new Guid(cboSiteFilter.SelectedValue.ToString())); frm.MdiParent = this.MdiParent; frm.Show(); } // Handle the user's decision to delete the selected material type private void btnDelete_Click(object sender, EventArgs e) { if (dgvMaterialTypeList.SelectedRows.Count != 1) { MessageBox.Show("Please select a MaterialType to delete first.", "Factotum"); return; } Guid? currentEditItem = (Guid?)(dgvMaterialTypeList.SelectedRows[0].Cells["ID"].Value); if (Globals.IsFormOpen(this, "MaterialTypeEdit", currentEditItem)) { MessageBox.Show("Can't delete because that item is currently being edited.", "Factotum"); return; } EComponentMaterial ComponentMaterial = new EComponentMaterial(currentEditItem); ComponentMaterial.Delete(true); if (ComponentMaterial.CmpMaterialErrMsg != null) { MessageBox.Show(ComponentMaterial.CmpMaterialErrMsg, "Factotum"); ComponentMaterial.CmpMaterialErrMsg = null; } } // The user changed the status filter setting, so update the selector grid. private void cboStatus_SelectedIndexChanged(object sender, EventArgs e) { ApplyFilters(); } private void btnClose_Click(object sender, EventArgs e) { Close(); } // ---------------------------------------------------------------------- // Private utilities // ---------------------------------------------------------------------- // Update the material type selector grid by filling its rows based on current data and filters. // Then set the currently selected row to that of the supplied ID. // If the supplied ID isn't on the list because of the current filter state, just select the // first row if there is one. private void UpdateSelector(Guid? 
id) { // Save the sort specs if there are any, so we can re-apply them SortOrder sortOrder = dgvMaterialTypeList.SortOrder; int sortCol = -1; if (sortOrder != SortOrder.None) sortCol = dgvMaterialTypeList.SortedColumn.Index; // Update the grid view selector DataView dv = EComponentMaterial.GetDefaultDataView(); dgvMaterialTypeList.DataSource = dv; ApplyFilters(); // Re-apply the sort specs if (sortOrder == SortOrder.Ascending) dgvMaterialTypeList.Sort(dgvMaterialTypeList.Columns[sortCol], ListSortDirection.Ascending); else if (sortOrder == SortOrder.Descending) dgvMaterialTypeList.Sort(dgvMaterialTypeList.Columns[sortCol], ListSortDirection.Descending); // Select the current row SelectGridRow(id); } private void CustomizeGrid() { // Apply a default sort dgvMaterialTypeList.Sort(dgvMaterialTypeList.Columns["CmpMaterialName"], ListSortDirection.Ascending); // Fix up the column headings dgvMaterialTypeList.Columns["CmpMaterialCalBlockMaterial"].HeaderText = "Cal Block Material"; dgvMaterialTypeList.Columns["CmpMaterialName"].HeaderText = "Material Type"; dgvMaterialTypeList.Columns["CmpMaterialIsActive"].HeaderText = "Active"; // Hide some columns dgvMaterialTypeList.Columns["ID"].Visible = false; dgvMaterialTypeList.Columns["CmpMaterialIsActive"].Visible = false; dgvMaterialTypeList.Columns["CmpMaterialIsLclChg"].Visible = false; dgvMaterialTypeList.Columns["CmpMaterialSitID"].Visible = false; dgvMaterialTypeList.AutoResizeColumns(DataGridViewAutoSizeColumnsMode.DisplayedCells); } // Apply the current filters to the DataView. The DataGridView will auto-refresh. private void ApplyFilters() { if (dgvMaterialTypeList.DataSource == null) return; StringBuilder sb = new StringBuilder("CmpMaterialIsActive = ", 255); sb.Append(cboStatusFilter.SelectedIndex == (int)FilterActiveStatus.ShowActive ? "'Yes'" : "'No'"); sb.Append(" And CmpMaterialSitID = '" + cboSiteFilter.SelectedValue + "'"); if (txtNameFilter.Text.Length > 0) sb.Append(" And CmpMaterialName Like '" + txtNameFilter.Text + "*'"); DataView dv = (DataView)dgvMaterialTypeList.DataSource; dv.RowFilter = sb.ToString(); } // Select the row with the specified ID if it is currently displayed and scroll to it. // If the ID is not in the list, select the first row. private void SelectGridRow(Guid? id) { bool found = false; int rows = dgvMaterialTypeList.Rows.Count; if (rows == 0) return; int r = 0; DataGridViewCell firstCell = dgvMaterialTypeList.FirstDisplayedCell; if (id != null) { // Find the row with the specified key id and select it. for (r = 0; r < rows; r++) { if ((Guid?)dgvMaterialTypeList.Rows[r].Cells["ID"].Value == id) { dgvMaterialTypeList.CurrentCell = dgvMaterialTypeList[firstCell.ColumnIndex, r]; dgvMaterialTypeList.Rows[r].Selected = true; found = true; break; } } } if (found) { if (!dgvMaterialTypeList.Rows[r].Displayed) { // Scroll to the selected row if the ID was in the list. 
dgvMaterialTypeList.FirstDisplayedScrollingRowIndex = r; } } else { // Select the first item dgvMaterialTypeList.CurrentCell = firstCell; dgvMaterialTypeList.Rows[0].Selected = true; } } private void txtNameFilter_TextChanged(object sender, EventArgs e) { ApplyFilters(); } private void cboSiteFilter_SelectedIndexChanged(object sender, EventArgs e) { ApplyFilters(); } private void HandleEnablingForPropertySettings() { if (Globals.CurrentOutageID != null) { Guid OutageID = (Guid)Globals.CurrentOutageID; EOutage outage = new EOutage(OutageID); EUnit unit = new EUnit(outage.OutageUntID); if (unit.UnitSitID != (Guid)cboSiteFilter.SelectedValue) cboSiteFilter.SelectedValue = unit.UnitSitID; } cboSiteFilter.Enabled = (Globals.IsMasterDB); } } }
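// A small hardening sketch related to MaterialTypeView.ApplyFilters above: that method builds its
// DataView.RowFilter by concatenating raw user input, so a name filter containing a single quote
// (for example O'Brien) or a * wildcard breaks or distorts the filter expression. The helper below
// shows one conventional way to escape RowFilter string literals; RowFilterHelper and
// EscapeRowFilterValue are illustrative names only and are not part of the form above.
namespace Factotum
{
    internal static class RowFilterHelper
    {
        // DataView.RowFilter string literals are single-quoted; doubling an embedded quote escapes it,
        // and wrapping [, * and % in brackets makes them literal characters instead of LIKE wildcards.
        internal static string EscapeRowFilterValue(string value)
        {
            return value
                .Replace("[", "[[]")   // escape the bracket escape character first
                .Replace("'", "''")    // double embedded quotes for the string literal
                .Replace("*", "[*]")   // make LIKE wildcards literal
                .Replace("%", "[%]");
        }
    }
}

// Usage inside ApplyFilters would then look like (sketch):
//   sb.Append(" And CmpMaterialName Like '" + RowFilterHelper.EscapeRowFilterValue(txtNameFilter.Text) + "*'");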
// // MtpSource.cs // // Author: // Gabriel Burt <[email protected]> // // Copyright (C) 2008 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections.Generic; using System.Threading; using Mono.Unix; using Hyena; using Hyena.Collections; using Mtp; using MTP = Mtp; using Banshee.Base; using Banshee.Dap; using Banshee.ServiceStack; using Banshee.Library; using Banshee.Sources; using Banshee.Playlist; using Banshee.Configuration; using Banshee.Collection; using Banshee.Collection.Database; using Banshee.Hardware; namespace Banshee.Dap.Mtp { public class MtpSource : DapSource { private MtpDevice mtp_device; //private bool supports_jpegs = false; private Dictionary<int, Track> track_map; private Dictionary<string, Album> album_cache = new Dictionary<string, Album> (); private bool supports_jpegs = false; private bool can_sync_albumart = NeverSyncAlbumArtSchema.Get () == false; private int thumb_width = AlbumArtWidthSchema.Get (); public override void DeviceInitialize (IDevice device) { base.DeviceInitialize (device); var portInfo = device.ResolveUsbPortInfo (); if (portInfo == null) { throw new InvalidDeviceException (); } //int busnum = portInfo.BusNumber; int devnum = portInfo.DeviceNumber; List<RawMtpDevice> devices = null; try { devices = MtpDevice.Detect (); } catch (TypeInitializationException e) { Log.Exception (e); Log.Error ( Catalog.GetString ("Error Initializing MTP Device Support"), Catalog.GetString ("There was an error intializing MTP device support. See http://www.banshee-project.org/Guide/DAPs/MTP for more information."), true ); throw new InvalidDeviceException (); } catch (Exception e) { Log.Exception (e); //ShowGeneralExceptionDialog (e); throw new InvalidDeviceException (); } IVolume volume = device as IVolume; foreach (var v in devices) { // Using the HAL hardware backend, HAL says the busnum is 2, but libmtp says it's 0, so disabling that check //if (v.BusNumber == busnum && v.DeviceNumber == devnum) { if (v.DeviceNumber == devnum) { // If gvfs-gphoto has it mounted, unmount it if (volume != null) { volume.Unmount (); } for (int i = 5; i > 0 && mtp_device == null; i--) { try { mtp_device = MtpDevice.Connect (v); } catch (Exception) {} if (mtp_device == null) { Log.DebugFormat ("Failed to connect to mtp device. 
Trying {0} more times...", i - 1); Thread.Sleep (2000); } } } } if (mtp_device == null) { throw new InvalidDeviceException (); } Name = mtp_device.Name; Initialize (); List<string> mimetypes = new List<string> (); foreach (FileType format in mtp_device.GetFileTypes ()) { if (format == FileType.JPEG) { supports_jpegs = true; } else { string mimetype = MtpDevice.GetMimeTypeFor (format); if (mimetype != null) { mimetypes.Add (mimetype); } } } AcceptableMimeTypes = mimetypes.ToArray (); AddDapProperty (Catalog.GetString ("Serial number"), mtp_device.SerialNumber); AddDapProperty (Catalog.GetString ("Version"), mtp_device.Version); try { AddDapProperty (Catalog.GetString ("Battery level"), String.Format ("{0:0%}", mtp_device.BatteryLevel/100.0)); } catch (Exception e) { Log.Exception ("Unable to get battery level from MTP device", e); } } protected override void LoadFromDevice () { track_map = new Dictionary<int, Track> (); try { List<Track> files = null; lock (mtp_device) { files = mtp_device.GetAllTracks (delegate (ulong current, ulong total, IntPtr data) { //user_event.Progress = (double)current / total; // Translators: {0} is the name of the MTP audio device (eg Gabe's Zen Player), {1} is the // track currently being loaded, and {2} is the total # of tracks that will be loaded. SetStatus (String.Format (Catalog.GetString ("Loading {0} - {1} of {2}"), Name, current, total), false); return 0; }); } /*if (user_event.IsCancelRequested) { return; }*/ // Delete any empty albums lock (mtp_device) { foreach (Album album in mtp_device.GetAlbums ()) { if (album.Count == 0) { album.Remove (); } } } int [] source_ids = new int [] { DbId }; foreach (Track mtp_track in files) { int track_id; if ((track_id = DatabaseTrackInfo.GetTrackIdForUri (MtpTrackInfo.GetPathFromMtpTrack (mtp_track), source_ids)) > 0) { track_map[track_id] = mtp_track; } else { MtpTrackInfo track = new MtpTrackInfo (mtp_device, mtp_track); track.PrimarySource = this; track.Save (false); track_map[track.TrackId] = mtp_track; } } Hyena.Data.Sqlite.HyenaSqliteCommand insert_cmd = new Hyena.Data.Sqlite.HyenaSqliteCommand ( @"INSERT INTO CorePlaylistEntries (PlaylistID, TrackID) SELECT ?, TrackID FROM CoreTracks WHERE PrimarySourceID = ? 
AND ExternalID = ?"); lock (mtp_device) { var playlists = mtp_device.GetPlaylists (); if (playlists != null) { foreach (MTP.Playlist playlist in playlists) { PlaylistSource pl_src = new PlaylistSource (playlist.Name, this); pl_src.Save (); // TODO a transaction would make sense here (when the threading issue is fixed) foreach (int id in playlist.TrackIds) { ServiceManager.DbConnection.Execute (insert_cmd, pl_src.DbId, this.DbId, id); } pl_src.UpdateCounts (); AddChildSource (pl_src); } } } } catch (Exception e) { Log.Exception (e); } OnTracksAdded (); } public override void Import () { Log.Information ("Import to Library is not implemented for MTP devices yet", true); //new LibraryImportManager (true).QueueSource (BaseDirectory); } public override void CopyTrackTo (DatabaseTrackInfo track, SafeUri uri, BatchUserJob job) { if (track_map.ContainsKey (track.TrackId)) { track_map[track.TrackId].Download (uri.LocalPath, delegate (ulong current, ulong total, IntPtr data) { job.DetailedProgress = (double) current / total; return 0; }); } else { throw new Exception ("Error copying track from MTP device"); } } public override void SyncPlaylists () { lock (mtp_device) { List<MTP.Playlist> device_playlists = new List<MTP.Playlist> (mtp_device.GetPlaylists ()); foreach (MTP.Playlist playlist in device_playlists) { playlist.Remove (); } device_playlists.Clear (); // Add playlists from Banshee to the device foreach (Source child in Children) { PlaylistSource from = child as PlaylistSource; if (from != null && from.Count > 0) { MTP.Playlist playlist = new MTP.Playlist (mtp_device, from.Name); foreach (int track_id in ServiceManager.DbConnection.QueryEnumerable<int> (String.Format ( "SELECT CoreTracks.ExternalID FROM {0} WHERE {1}", from.DatabaseTrackModel.ConditionFromFragment, from.DatabaseTrackModel.Condition))) { playlist.AddTrack (track_id); } playlist.Save (); } } } } public override bool CanRename { get { return !(IsAdding || IsDeleting); } } private static SafeUri empty_file = new SafeUri (Paths.Combine (Paths.ApplicationCache, "mtp.mp3")); protected override void OnTracksDeleted () { // Hack to get the disk usage indicate to be accurate, which seems to // only be updated when tracks are added, not removed. 
try { lock (mtp_device) { using (System.IO.TextWriter writer = new System.IO.StreamWriter (Banshee.IO.File.OpenWrite (empty_file, true))) { writer.Write ("foo"); } Track mtp_track = new Track (System.IO.Path.GetFileName (empty_file.LocalPath), 3); mtp_device.UploadTrack (empty_file.AbsolutePath, mtp_track, mtp_device.MusicFolder); mtp_device.Remove (mtp_track); Banshee.IO.File.Delete (empty_file); } } catch {} base.OnTracksDeleted (); } public override void Rename (string newName) { base.Rename (newName); lock (mtp_device) { mtp_device.Name = newName; } } private long bytes_used; public override long BytesUsed { get { if (Monitor.TryEnter (mtp_device)) { bytes_used = 0; foreach (DeviceStorage s in mtp_device.GetStorage ()) { bytes_used += (long) s.MaxCapacity - (long) s.FreeSpaceInBytes; } Monitor.Exit (mtp_device); } return bytes_used; } } private long bytes_capacity; public override long BytesCapacity { get { if (Monitor.TryEnter (mtp_device)) { bytes_capacity = 0; foreach (DeviceStorage s in mtp_device.GetStorage ()) { bytes_capacity += (long) s.MaxCapacity; } Monitor.Exit (mtp_device); } return bytes_capacity; } } public override bool IsReadOnly { get { return false; } } protected override void AddTrackToDevice (DatabaseTrackInfo track, SafeUri fromUri) { if (track.PrimarySourceId == DbId) return; lock (mtp_device) { Track mtp_track = TrackInfoToMtpTrack (track, fromUri); bool video = track.HasAttribute (TrackMediaAttributes.VideoStream); mtp_device.UploadTrack (fromUri.LocalPath, mtp_track, GetFolderForTrack (track), OnUploadProgress); // Add/update album art if (!video) { string key = MakeAlbumKey (track.AlbumArtist, track.AlbumTitle); if (!album_cache.ContainsKey (key)) { Album album = new Album (mtp_device, track.AlbumTitle, track.AlbumArtist, track.Genre, track.Composer); album.AddTrack (mtp_track); if (supports_jpegs && can_sync_albumart) { try { Gdk.Pixbuf pic = ServiceManager.Get<Banshee.Collection.Gui.ArtworkManager> ().LookupScalePixbuf ( track.ArtworkId, thumb_width ); if (pic != null) { byte [] bytes = pic.SaveToBuffer ("jpeg"); album.Save (bytes, (uint)pic.Width, (uint)pic.Height); Banshee.Collection.Gui.ArtworkManager.DisposePixbuf (pic); } album_cache[key] = album; } catch (Exception e) { Log.Debug ("Failed to create MTP Album", e.Message); } } else { album.Save (); album_cache[key] = album; } } else { Album album = album_cache[key]; album.AddTrack (mtp_track); album.Save (); } } MtpTrackInfo new_track = new MtpTrackInfo (mtp_device, mtp_track); new_track.PrimarySource = this; new_track.Save (false); track_map[new_track.TrackId] = mtp_track; } } private Folder GetFolderForTrack (TrackInfo track) { if (track.HasAttribute (TrackMediaAttributes.Podcast)) { return mtp_device.PodcastFolder; } else if (track.HasAttribute (TrackMediaAttributes.VideoStream)) { return mtp_device.VideoFolder; } else { return mtp_device.MusicFolder; } } private int OnUploadProgress (ulong sent, ulong total, IntPtr data) { AddTrackJob.DetailedProgress = (double) sent / (double) total; return 0; } protected override bool DeleteTrack (DatabaseTrackInfo track) { lock (mtp_device) { Track mtp_track = track_map [track.TrackId]; track_map.Remove (track.TrackId); // Remove from device mtp_device.Remove (mtp_track); // Remove track from album, and remove album from device if it no longer has tracks string key = MakeAlbumKey (track.ArtistName, track.AlbumTitle); if (album_cache.ContainsKey (key)) { Album album = album_cache[key]; album.RemoveTrack (mtp_track); if (album.Count == 0) { album.Remove (); 
album_cache.Remove (key); } } return true; } } public Track TrackInfoToMtpTrack (TrackInfo track, SafeUri fromUri) { Track f = new Track (System.IO.Path.GetFileName (fromUri.LocalPath), (ulong) Banshee.IO.File.GetSize (fromUri)); MtpTrackInfo.ToMtpTrack (track, f); return f; } private bool disposed = false; public override void Dispose () { if (disposed) return; disposed = true; base.Dispose (); if (mtp_device != null) { lock (mtp_device) { mtp_device.Dispose (); } } ServiceManager.SourceManager.RemoveSource (this); mtp_device = null; } protected override void Eject () { base.Eject (); Dispose (); } private static string MakeAlbumKey (string album_artist, string album) { return String.Format ("{0}_{1}", album_artist, album); } public static readonly SchemaEntry<bool> NeverSyncAlbumArtSchema = new SchemaEntry<bool>( "plugins.mtp", "never_sync_albumart", false, "Album art disabled", "Regardless of device's capabilities, do not sync album art" ); public static readonly SchemaEntry<int> AlbumArtWidthSchema = new SchemaEntry<int>( "plugins.mtp", "albumart_max_width", 170, "Album art max width", "The maximum width to allow for album art." ); } }
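// A generalized sketch of the bounded-retry pattern used in DeviceInitialize above, where
// MtpDevice.Connect is attempted up to five times with a two-second pause and exceptions between
// attempts are swallowed. TryWithRetries is an illustrative helper name, not Banshee API; the
// generic shape is an assumption about how one might factor that loop out.
using System;
using System.Threading;

internal static class BoundedRetrySketch
{
    // Runs attempt up to maxAttempts times, sleeping delay between failed tries,
    // and returns the first non-null result (or null if every attempt failed).
    internal static T TryWithRetries<T> (Func<T> attempt, int maxAttempts, TimeSpan delay) where T : class
    {
        for (int i = maxAttempts; i > 0; i--) {
            try {
                T result = attempt ();
                if (result != null) {
                    return result;
                }
            } catch (Exception) {
                // Swallow and retry, mirroring the empty catch around MtpDevice.Connect above.
            }
            if (i > 1) {
                Thread.Sleep (delay);
            }
        }
        return null;
    }
}

// The connect loop in DeviceInitialize could then be written as (sketch):
//   mtp_device = BoundedRetrySketch.TryWithRetries (() => MtpDevice.Connect (v), 5, TimeSpan.FromSeconds (2));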
using System; using System.Collections.Generic; using System.Linq; using Should; using Xunit; namespace AutoMapper.UnitTests { namespace BidirectionalRelationships { public class When_mapping_to_a_destination_with_a_bidirectional_parent_one_to_many_child_relationship : AutoMapperSpecBase { private ParentDto _dto; protected override MapperConfiguration Configuration => new MapperConfiguration(cfg => { cfg.CreateMap<ParentModel, ParentDto>(); cfg.CreateMap<ChildModel, ChildDto>(); }); protected override void Because_of() { var parent = new ParentModel { ID = "PARENT_ONE" }; parent.AddChild(new ChildModel { ID = "CHILD_ONE" }); parent.AddChild(new ChildModel { ID = "CHILD_TWO" }); _dto = Mapper.Map<ParentModel, ParentDto>(parent); } [Fact] public void Should_preserve_the_parent_child_relationship_on_the_destination() { _dto.Children[0].Parent.ShouldBeSameAs(_dto); _dto.Children[1].Parent.ShouldBeSameAs(_dto); } public class ParentModel { public ParentModel() { Children = new List<ChildModel>(); } public string ID { get; set; } public IList<ChildModel> Children { get; private set; } public void AddChild(ChildModel child) { child.Parent = this; Children.Add(child); } } public class ChildModel { public string ID { get; set; } public ParentModel Parent { get; set; } } public class ParentDto { public string ID { get; set; } public IList<ChildDto> Children { get; set; } } public class ChildDto { public string ID { get; set; } public ParentDto Parent { get; set; } } } //public class When_mapping_to_a_destination_with_a_bidirectional_parent_one_to_many_child_relationship_using_CustomMapper_StackOverflow : AutoMapperSpecBase //{ // private ParentDto _dto; // private ParentModel _parent; // protected override void Establish_context() // { // _parent = new ParentModel // { // ID = 2 // }; // List<ChildModel> childModels = new List<ChildModel> // { // new ChildModel // { // ID = 1, // Parent = _parent // } // }; // Dictionary<int, ParentModel> parents = childModels.ToDictionary(x => x.ID, x => x.Parent); // Mapper.CreateMap<int, ParentDto>().ConvertUsing(new ChildIdToParentDtoConverter(parents)); // Mapper.CreateMap<int, List<ChildDto>>().ConvertUsing(new ParentIdToChildDtoListConverter(childModels)); // Mapper.CreateMap<ParentModel, ParentDto>() // .ForMember(dest => dest.Children, opt => opt.MapFrom(src => src.ID)); // Mapper.CreateMap<ChildModel, ChildDto>(); // config.AssertConfigurationIsValid(); // } // protected override void Because_of() // { // _dto = Mapper.Map<ParentModel, ParentDto>(_parent); // } // [Fact(Skip = "This test breaks the Test Runner")] // public void Should_preserve_the_parent_child_relationship_on_the_destination() // { // _dto.Children[0].Parent.ID.ShouldEqual(_dto.ID); // } // public class ChildIdToParentDtoConverter : TypeConverter<int, ParentDto> // { // private readonly Dictionary<int, ParentModel> _parentModels; // public ChildIdToParentDtoConverter(Dictionary<int, ParentModel> parentModels) // { // _parentModels = parentModels; // } // protected override ParentDto ConvertCore(int childId) // { // ParentModel parentModel = _parentModels[childId]; // MappingEngine mappingEngine = (MappingEngine)Mapper.Engine; // return mappingEngine.Map<ParentModel, ParentDto>(parentModel); // } // } // public class ParentIdToChildDtoListConverter : TypeConverter<int, List<ChildDto>> // { // private readonly IList<ChildModel> _childModels; // public ParentIdToChildDtoListConverter(IList<ChildModel> childModels) // { // _childModels = childModels; // } // protected override List<ChildDto> 
ConvertCore(int childId) // { // List<ChildModel> childModels = _childModels.Where(x => x.Parent.ID == childId).ToList(); // MappingEngine mappingEngine = (MappingEngine)Mapper.Engine; // return mappingEngine.Map<List<ChildModel>, List<ChildDto>>(childModels); // } // } // public class ParentModel // { // public int ID { get; set; } // } // public class ChildModel // { // public int ID { get; set; } // public ParentModel Parent { get; set; } // } // public class ParentDto // { // public int ID { get; set; } // public List<ChildDto> Children { get; set; } // } // public class ChildDto // { // public int ID { get; set; } // public ParentDto Parent { get; set; } // } //} public class When_mapping_to_a_destination_with_a_bidirectional_parent_one_to_many_child_relationship_using_CustomMapper_with_context : AutoMapperSpecBase { private ParentDto _dto; private static ParentModel _parent; protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg => { _parent = new ParentModel { ID = 2 }; List<ChildModel> childModels = new List<ChildModel> { new ChildModel { ID = 1, Parent = _parent } }; Dictionary<int, ParentModel> parents = childModels.ToDictionary(x => x.ID, x => x.Parent); cfg.CreateMap<int, ParentDto>().ConvertUsing(new ChildIdToParentDtoConverter(parents)); cfg.CreateMap<int, List<ChildDto>>().ConvertUsing(new ParentIdToChildDtoListConverter(childModels)); cfg.CreateMap<ParentModel, ParentDto>() .ForMember(dest => dest.Children, opt => opt.MapFrom(src => src.ID)); cfg.CreateMap<ChildModel, ChildDto>(); }); protected override void Because_of() { _dto = Mapper.Map<ParentModel, ParentDto>(_parent); } [Fact] public void Should_preserve_the_parent_child_relationship_on_the_destination() { _dto.Children[0].Parent.ID.ShouldEqual(_dto.ID); } public class ChildIdToParentDtoConverter : ITypeConverter<int, ParentDto> { private readonly Dictionary<int, ParentModel> _parentModels; public ChildIdToParentDtoConverter(Dictionary<int, ParentModel> parentModels) { _parentModels = parentModels; } public ParentDto Convert(ResolutionContext resolutionContext) { int childId = (int) resolutionContext.SourceValue; ParentModel parentModel = _parentModels[childId]; var context = resolutionContext.CreateTypeContext( resolutionContext.Engine.ConfigurationProvider.ResolveTypeMap(typeof (ParentModel), typeof (ParentDto)), parentModel, null, typeof (ParentModel), typeof (ParentDto)); return (ParentDto) resolutionContext.Engine.Map(context); } } public class ParentIdToChildDtoListConverter : ITypeConverter<int, List<ChildDto>> { private readonly IList<ChildModel> _childModels; public ParentIdToChildDtoListConverter(IList<ChildModel> childModels) { _childModels = childModels; } public List<ChildDto> Convert(ResolutionContext resolutionContext) { int childId = (int)resolutionContext.SourceValue; List<ChildModel> childModels = _childModels.Where(x => x.Parent.ID == childId).ToList(); var context = resolutionContext.CreateTypeContext( null, childModels, null, typeof(List<ChildModel>), typeof(List<ChildDto>)); return (List<ChildDto>) context.Engine.Map(context); } } public class ParentModel { public int ID { get; set; } } public class ChildModel { public int ID { get; set; } public ParentModel Parent { get; set; } } public class ParentDto { public int ID { get; set; } public List<ChildDto> Children { get; set; } } public class ChildDto { public int ID { get; set; } public ParentDto Parent { get; set; } } } public class 
When_mapping_to_a_destination_with_a_bidirectional_parent_one_to_one_child_relationship : AutoMapperSpecBase { private FooDto _dto; protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg => { cfg.CreateMap<Foo, FooDto>(); cfg.CreateMap<Bar, BarDto>(); }); protected override void Because_of() { var foo = new Foo { Bar = new Bar { Value = "something" } }; foo.Bar.Foo = foo; _dto = Mapper.Map<Foo, FooDto>(foo); } [Fact] public void Should_preserve_the_parent_child_relationship_on_the_destination() { _dto.Bar.Foo.ShouldBeSameAs(_dto); } public class Foo { public Bar Bar { get; set; } } public class Bar { public Foo Foo { get; set; } public string Value { get; set; } } public class FooDto { public BarDto Bar { get; set; } } public class BarDto { public FooDto Foo { get; set; } public string Value { get; set; } } } public class When_mapping_to_a_destination_containing_two_dtos_mapped_from_the_same_source : AutoMapperSpecBase { private FooContainerModel _dto; protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg => { cfg.CreateMap<FooModel, FooScreenModel>(); cfg.CreateMap<FooModel, FooInputModel>(); cfg.CreateMap<FooModel, FooContainerModel>() .ForMember(dest => dest.Input, opt => opt.MapFrom(src => src)) .ForMember(dest => dest.Screen, opt => opt.MapFrom(src => src)); }); protected override void Because_of() { var model = new FooModel { Id = 3 }; _dto = Mapper.Map<FooModel, FooContainerModel>(model); } [Fact] public void Should_not_preserve_identity_when_destinations_are_incompatible() { _dto.ShouldBeType<FooContainerModel>(); _dto.Input.ShouldBeType<FooInputModel>(); _dto.Screen.ShouldBeType<FooScreenModel>(); _dto.Input.Id.ShouldEqual(3); _dto.Screen.Id.ShouldEqual("3"); } public class FooContainerModel { public FooInputModel Input { get; set; } public FooScreenModel Screen { get; set; } } public class FooScreenModel { public string Id { get; set; } } public class FooInputModel { public long Id { get; set; } } public class FooModel { public long Id { get; set; } } } public class When_mapping_with_a_bidirectional_relationship_that_includes_arrays : AutoMapperSpecBase { private ParentDto _dtoParent; protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg => { cfg.CreateMap<Parent, ParentDto>(); cfg.CreateMap<Child, ChildDto>(); }); protected override void Because_of() { var parent1 = new Parent { Name = "Parent 1" }; var child1 = new Child { Name = "Child 1" }; parent1.Children.Add(child1); child1.Parents.Add(parent1); _dtoParent = Mapper.Map<Parent, ParentDto>(parent1); } [Fact] public void Should_map_successfully() { object.ReferenceEquals(_dtoParent.Children[0].Parents[0], _dtoParent).ShouldBeTrue(); } public class Parent { public Guid Id { get; private set; } public string Name { get; set; } public List<Child> Children { get; set; } public Parent() { Id = Guid.NewGuid(); Children = new List<Child>(); } public bool Equals(Parent other) { if (ReferenceEquals(null, other)) return false; if (ReferenceEquals(this, other)) return true; return other.Id.Equals(Id); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != typeof (Parent)) return false; return Equals((Parent) obj); } public override int GetHashCode() { return Id.GetHashCode(); } } public class Child { public Guid Id { get; private set; } public string Name { get; set; } public List<Parent> Parents { get; set; } public Child() { 
Id = Guid.NewGuid(); Parents = new List<Parent>(); } public bool Equals(Child other) { if (ReferenceEquals(null, other)) return false; if (ReferenceEquals(this, other)) return true; return other.Id.Equals(Id); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != typeof (Child)) return false; return Equals((Child) obj); } public override int GetHashCode() { return Id.GetHashCode(); } } public class ParentDto { public Guid Id { get; set; } public string Name { get; set; } public List<ChildDto> Children { get; set; } public ParentDto() { Children = new List<ChildDto>(); } } public class ChildDto { public Guid Id { get; set; } public string Name { get; set; } public List<ParentDto> Parents { get; set; } public ChildDto() { Parents = new List<ParentDto>(); } } } public class When_disabling_instance_cache_for_instances { public class Tag { public int Id { get; set; } public string Name { get; set; } public IEnumerable<Tag> ChildTags { get; set; } protected bool Equals(Tag other) { return Id == other.Id; } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != this.GetType()) return false; return Equals((Tag) obj); } public override int GetHashCode() { return Id; } } [Fact] public void Test() { var tags = new List<Tag> { new Tag { Id = 1, Name = "Tag 1", ChildTags = new List<Tag> { new Tag { Id = 2, Name = "Tag 2", ChildTags = new List<Tag> { new Tag {Id = 3, Name = "Tag 3"}, new Tag {Id = 4, Name = "Tag 4"} } } } }, new Tag {Id = 1, Name = "Tag 1"}, new Tag { Id = 3, Name = "Tag 3", ChildTags = new List<Tag> { new Tag {Id = 4, Name = "Tag 4"} } } }; var config = new MapperConfiguration(cfg => cfg.CreateMap<Tag, Tag>().ForMember(dest => dest.ChildTags, opt => opt.MapFrom(src => src.ChildTags))); var mapper = config.CreateMapper(); var result = mapper.Map<IList<Tag>, IList<Tag>>(tags, opt => opt.DisableCache = true); result[1].ChildTags.Count().ShouldEqual(0); result[2].ChildTags.Count().ShouldEqual(1); result[2].ChildTags.First().Id.ShouldEqual(4); } } } }
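// ---------------------------------------------------------------------------
// Editor's sketch (not one of the test fixtures above). It reproduces the
// parent/child identity preservation exercised by the first fixture as a
// plain snippet outside the AutoMapperSpecBase harness. Whether
// PreserveReferences() must be requested explicitly depends on the AutoMapper
// version in use, so it is configured here defensively; the namespace and the
// Node/NodeDto types are hypothetical.
// ---------------------------------------------------------------------------
namespace AutoMapper.UnitTests.EditorSketches
{
    using System.Collections.Generic;

    public static class BidirectionalMappingSketch
    {
        public class Node
        {
            public string Name { get; set; }
            public Node Parent { get; set; }
            public List<Node> Children { get; } = new List<Node>();
        }

        public class NodeDto
        {
            public string Name { get; set; }
            public NodeDto Parent { get; set; }
            public List<NodeDto> Children { get; set; }
        }

        public static bool Run()
        {
            var config = new MapperConfiguration(cfg =>
                cfg.CreateMap<Node, NodeDto>().PreserveReferences());
            var mapper = config.CreateMapper();

            var root = new Node { Name = "root" };
            var child = new Node { Name = "child", Parent = root };
            root.Children.Add(child);

            NodeDto dto = mapper.Map<Node, NodeDto>(root);

            // With reference preservation, the mapped child's Parent is the
            // very same DTO instance as the mapped root.
            return ReferenceEquals(dto, dto.Children[0].Parent);
        }
    }
}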
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. namespace System.Globalization { /// <remarks> /// Calendar support range: /// Calendar Minimum Maximum /// ========== ========== ========== /// Gregorian 1901/02/19 2101/01/28 /// ChineseLunisolar 1901/01/01 2100/12/29 /// </remarks> public class ChineseLunisolarCalendar : EastAsianLunisolarCalendar { public const int ChineseEra = 1; private const int MinLunisolarYear = 1901; private const int MaxLunisolarYear = 2100; private static readonly DateTime s_minDate = new DateTime(1901, 2, 19); private static readonly DateTime s_maxDate = new DateTime((new DateTime(2101, 1, 28, 23, 59, 59, 999)).Ticks + 9999); public override DateTime MinSupportedDateTime => s_minDate; public override DateTime MaxSupportedDateTime => s_maxDate; protected override int DaysInYearBeforeMinSupportedYear { get { // 1900: 1-29 2-30 3-29 4-29 5-30 6-29 7-30 8-30 Leap8-29 9-30 10-30 11-29 12-30 from Calendrical Tabulations [1] // [1] Reingold, Edward M, and Nachum Dershowitz. Calendrical Tabulations, 1900 - 2200. Cambridge: Cambridge Univ. Press, 2002. Print. return 384; } } // Data for years 1901-1905 and 1907-2100 matches output of Calendrical Calculations [2] and published calendar tables [3]. // For 1906, month 4 of the Chinese year starts on 24 Apr 1906 and has 29 days. This is historically accurate // but different to the values in [1] and output from [2]. This is due to a change in the astronomical methods used // by the Chinese to calculate the calendar from 1913 onwards (see warnings in [1]). // [2] Reingold, Edward M, and Nachum Dershowitz. Calendrical Calculations: The Ultimate Edition. Cambridge [etc.]: Cambridge University Press, 2018. Print. // [3] Wang, Jianmin. Xin Bian Wan Nian Li: (1840-2050) Chong Bian Ben. Beijing: Ke xue pu ji chu ban she, 1990. Print. 
private static readonly int[,] s_yinfo = { /*Y LM Lmon Lday DaysPerMonth D1 D2 D3 D4 D5 D6 D7 D8 D9 D10 D11 D12 D13 #Days 1901 */ { 00, 02, 19, 0b0100101011100000 }, /* 29 30 29 29 30 29 30 29 30 30 30 29 354 1902 */ { 00, 02, 08, 0b1010010101110000 }, /* 30 29 30 29 29 30 29 30 29 30 30 30 355 1903 */ { 05, 01, 29, 0b0101001001101000 }, /* 29 30 29 30 29 29 30 29 29 30 30 29 30 383 1904 */ { 00, 02, 16, 0b1101001001100000 }, /* 30 30 29 30 29 29 30 29 29 30 30 29 354 1905 */ { 00, 02, 04, 0b1101100101010000 }, /* 30 30 29 30 30 29 29 30 29 30 29 30 355 1906 */ { 04, 01, 25, 0b0110101010101000 }, /* 29 30 30 29 30 29 30 29 30 29 30 29 30 384 1907 */ { 00, 02, 13, 0b0101011010100000 }, /* 29 30 29 30 29 30 30 29 30 29 30 29 354 1908 */ { 00, 02, 02, 0b1001101011010000 }, /* 30 29 29 30 30 29 30 29 30 30 29 30 355 1909 */ { 02, 01, 22, 0b0100101011101000 }, /* 29 30 29 29 30 29 30 29 30 30 30 29 30 384 1910 */ { 00, 02, 10, 0b0100101011100000 }, /* 29 30 29 29 30 29 30 29 30 30 30 29 354 1911 */ { 06, 01, 30, 0b1010010011011000 }, /* 30 29 30 29 29 30 29 29 30 30 29 30 30 384 1912 */ { 00, 02, 18, 0b1010010011010000 }, /* 30 29 30 29 29 30 29 29 30 30 29 30 354 1913 */ { 00, 02, 06, 0b1101001001010000 }, /* 30 30 29 30 29 29 30 29 29 30 29 30 354 1914 */ { 05, 01, 26, 0b1101010100101000 }, /* 30 30 29 30 29 30 29 30 29 29 30 29 30 384 1915 */ { 00, 02, 14, 0b1011010101000000 }, /* 30 29 30 30 29 30 29 30 29 30 29 29 354 1916 */ { 00, 02, 03, 0b1101011010100000 }, /* 30 30 29 30 29 30 30 29 30 29 30 29 355 1917 */ { 02, 01, 23, 0b1001011011010000 }, /* 30 29 29 30 29 30 30 29 30 30 29 30 29 384 1918 */ { 00, 02, 11, 0b1001010110110000 }, /* 30 29 29 30 29 30 29 30 30 29 30 30 355 1919 */ { 07, 02, 01, 0b0100100110111000 }, /* 29 30 29 29 30 29 29 30 30 29 30 30 30 384 1920 */ { 00, 02, 20, 0b0100100101110000 }, /* 29 30 29 29 30 29 29 30 29 30 30 30 354 1921 */ { 00, 02, 08, 0b1010010010110000 }, /* 30 29 30 29 29 30 29 29 30 29 30 30 354 1922 */ { 05, 01, 28, 0b1011001001011000 }, /* 30 29 30 30 29 29 30 29 29 30 29 30 30 384 1923 */ { 00, 02, 16, 0b0110101001010000 }, /* 29 30 30 29 30 29 30 29 29 30 29 30 354 1924 */ { 00, 02, 05, 0b0110110101000000 }, /* 29 30 30 29 30 30 29 30 29 30 29 29 354 1925 */ { 04, 01, 24, 0b1010110110101000 }, /* 30 29 30 29 30 30 29 30 30 29 30 29 30 385 1926 */ { 00, 02, 13, 0b0010101101100000 }, /* 29 29 30 29 30 29 30 30 29 30 30 29 354 1927 */ { 00, 02, 02, 0b1001010101110000 }, /* 30 29 29 30 29 30 29 30 29 30 30 30 355 1928 */ { 02, 01, 23, 0b0100100101111000 }, /* 29 30 29 29 30 29 29 30 29 30 30 30 30 384 1929 */ { 00, 02, 10, 0b0100100101110000 }, /* 29 30 29 29 30 29 29 30 29 30 30 30 354 1930 */ { 06, 01, 30, 0b0110010010110000 }, /* 29 30 30 29 29 30 29 29 30 29 30 30 29 383 1931 */ { 00, 02, 17, 0b1101010010100000 }, /* 30 30 29 30 29 30 29 29 30 29 30 29 354 1932 */ { 00, 02, 06, 0b1110101001010000 }, /* 30 30 30 29 30 29 30 29 29 30 29 30 355 1933 */ { 05, 01, 26, 0b0110110101001000 }, /* 29 30 30 29 30 30 29 30 29 30 29 29 30 384 1934 */ { 00, 02, 14, 0b0101101011010000 }, /* 29 30 29 30 30 29 30 29 30 30 29 30 355 1935 */ { 00, 02, 04, 0b0010101101100000 }, /* 29 29 30 29 30 29 30 30 29 30 30 29 354 1936 */ { 03, 01, 24, 0b1001001101110000 }, /* 30 29 29 30 29 29 30 30 29 30 30 30 29 384 1937 */ { 00, 02, 11, 0b1001001011100000 }, /* 30 29 29 30 29 29 30 29 30 30 30 29 354 1938 */ { 07, 01, 31, 0b1100100101101000 }, /* 30 30 29 29 30 29 29 30 29 30 30 29 30 384 1939 */ { 00, 02, 19, 0b1100100101010000 }, /* 30 30 29 29 30 29 29 30 29 30 29 30 
354 1940 */ { 00, 02, 08, 0b1101010010100000 }, /* 30 30 29 30 29 30 29 29 30 29 30 29 354 1941 */ { 06, 01, 27, 0b1101101001010000 }, /* 30 30 29 30 30 29 30 29 29 30 29 30 29 384 1942 */ { 00, 02, 15, 0b1011010101010000 }, /* 30 29 30 30 29 30 29 30 29 30 29 30 355 1943 */ { 00, 02, 05, 0b0101011010100000 }, /* 29 30 29 30 29 30 30 29 30 29 30 29 354 1944 */ { 04, 01, 25, 0b1010101011011000 }, /* 30 29 30 29 30 29 30 29 30 30 29 30 30 385 1945 */ { 00, 02, 13, 0b0010010111010000 }, /* 29 29 30 29 29 30 29 30 30 30 29 30 354 1946 */ { 00, 02, 02, 0b1001001011010000 }, /* 30 29 29 30 29 29 30 29 30 30 29 30 354 1947 */ { 02, 01, 22, 0b1100100101011000 }, /* 30 30 29 29 30 29 29 30 29 30 29 30 30 384 1948 */ { 00, 02, 10, 0b1010100101010000 }, /* 30 29 30 29 30 29 29 30 29 30 29 30 354 1949 */ { 07, 01, 29, 0b1011010010101000 }, /* 30 29 30 30 29 30 29 29 30 29 30 29 30 384 1950 */ { 00, 02, 17, 0b0110110010100000 }, /* 29 30 30 29 30 30 29 29 30 29 30 29 354 1951 */ { 00, 02, 06, 0b1011010101010000 }, /* 30 29 30 30 29 30 29 30 29 30 29 30 355 1952 */ { 05, 01, 27, 0b0101010110101000 }, /* 29 30 29 30 29 30 29 30 30 29 30 29 30 384 1953 */ { 00, 02, 14, 0b0100110110100000 }, /* 29 30 29 29 30 30 29 30 30 29 30 29 354 1954 */ { 00, 02, 03, 0b1010010110110000 }, /* 30 29 30 29 29 30 29 30 30 29 30 30 355 1955 */ { 03, 01, 24, 0b0101001010111000 }, /* 29 30 29 30 29 29 30 29 30 29 30 30 30 384 1956 */ { 00, 02, 12, 0b0101001010110000 }, /* 29 30 29 30 29 29 30 29 30 29 30 30 354 1957 */ { 08, 01, 31, 0b1010100101010000 }, /* 30 29 30 29 30 29 29 30 29 30 29 30 29 383 1958 */ { 00, 02, 18, 0b1110100101010000 }, /* 30 30 30 29 30 29 29 30 29 30 29 30 355 1959 */ { 00, 02, 08, 0b0110101010100000 }, /* 29 30 30 29 30 29 30 29 30 29 30 29 354 1960 */ { 06, 01, 28, 0b1010110101010000 }, /* 30 29 30 29 30 30 29 30 29 30 29 30 29 384 1961 */ { 00, 02, 15, 0b1010101101010000 }, /* 30 29 30 29 30 29 30 30 29 30 29 30 355 1962 */ { 00, 02, 05, 0b0100101101100000 }, /* 29 30 29 29 30 29 30 30 29 30 30 29 354 1963 */ { 04, 01, 25, 0b1010010101110000 }, /* 30 29 30 29 29 30 29 30 29 30 30 30 29 384 1964 */ { 00, 02, 13, 0b1010010101110000 }, /* 30 29 30 29 29 30 29 30 29 30 30 30 355 1965 */ { 00, 02, 02, 0b0101001001100000 }, /* 29 30 29 30 29 29 30 29 29 30 30 29 353 1966 */ { 03, 01, 21, 0b1110100100110000 }, /* 30 30 30 29 30 29 29 30 29 29 30 30 29 384 1967 */ { 00, 02, 09, 0b1101100101010000 }, /* 30 30 29 30 30 29 29 30 29 30 29 30 355 1968 */ { 07, 01, 30, 0b0101101010101000 }, /* 29 30 29 30 30 29 30 29 30 29 30 29 30 384 1969 */ { 00, 02, 17, 0b0101011010100000 }, /* 29 30 29 30 29 30 30 29 30 29 30 29 354 1970 */ { 00, 02, 06, 0b1001011011010000 }, /* 30 29 29 30 29 30 30 29 30 30 29 30 355 1971 */ { 05, 01, 27, 0b0100101011101000 }, /* 29 30 29 29 30 29 30 29 30 30 30 29 30 384 1972 */ { 00, 02, 15, 0b0100101011010000 }, /* 29 30 29 29 30 29 30 29 30 30 29 30 354 1973 */ { 00, 02, 03, 0b1010010011010000 }, /* 30 29 30 29 29 30 29 29 30 30 29 30 354 1974 */ { 04, 01, 23, 0b1101001001101000 }, /* 30 30 29 30 29 29 30 29 29 30 30 29 30 384 1975 */ { 00, 02, 11, 0b1101001001010000 }, /* 30 30 29 30 29 29 30 29 29 30 29 30 354 1976 */ { 08, 01, 31, 0b1101010100101000 }, /* 30 30 29 30 29 30 29 30 29 29 30 29 30 384 1977 */ { 00, 02, 18, 0b1011010101000000 }, /* 30 29 30 30 29 30 29 30 29 30 29 29 354 1978 */ { 00, 02, 07, 0b1011011010100000 }, /* 30 29 30 30 29 30 30 29 30 29 30 29 355 1979 */ { 06, 01, 28, 0b1001011011010000 }, /* 30 29 29 30 29 30 30 29 30 30 29 30 29 384 1980 */ { 00, 02, 16, 
0b1001010110110000 }, /* 30 29 29 30 29 30 29 30 30 29 30 30 355 1981 */ { 00, 02, 05, 0b0100100110110000 }, /* 29 30 29 29 30 29 29 30 30 29 30 30 354 1982 */ { 04, 01, 25, 0b1010010010111000 }, /* 30 29 30 29 29 30 29 29 30 29 30 30 30 384 1983 */ { 00, 02, 13, 0b1010010010110000 }, /* 30 29 30 29 29 30 29 29 30 29 30 30 354 1984 */ { 10, 02, 02, 0b1011001001011000 }, /* 30 29 30 30 29 29 30 29 29 30 29 30 30 384 1985 */ { 00, 02, 20, 0b0110101001010000 }, /* 29 30 30 29 30 29 30 29 29 30 29 30 354 1986 */ { 00, 02, 09, 0b0110110101000000 }, /* 29 30 30 29 30 30 29 30 29 30 29 29 354 1987 */ { 06, 01, 29, 0b1010110110100000 }, /* 30 29 30 29 30 30 29 30 30 29 30 29 29 384 1988 */ { 00, 02, 17, 0b1010101101100000 }, /* 30 29 30 29 30 29 30 30 29 30 30 29 355 1989 */ { 00, 02, 06, 0b1001010101110000 }, /* 30 29 29 30 29 30 29 30 29 30 30 30 355 1990 */ { 05, 01, 27, 0b0100100101111000 }, /* 29 30 29 29 30 29 29 30 29 30 30 30 30 384 1991 */ { 00, 02, 15, 0b0100100101110000 }, /* 29 30 29 29 30 29 29 30 29 30 30 30 354 1992 */ { 00, 02, 04, 0b0110010010110000 }, /* 29 30 30 29 29 30 29 29 30 29 30 30 354 1993 */ { 03, 01, 23, 0b0110101001010000 }, /* 29 30 30 29 30 29 30 29 29 30 29 30 29 383 1994 */ { 00, 02, 10, 0b1110101001010000 }, /* 30 30 30 29 30 29 30 29 29 30 29 30 355 1995 */ { 08, 01, 31, 0b0110101100101000 }, /* 29 30 30 29 30 29 30 30 29 29 30 29 30 384 1996 */ { 00, 02, 19, 0b0101101011000000 }, /* 29 30 29 30 30 29 30 29 30 30 29 29 354 1997 */ { 00, 02, 07, 0b1010101101100000 }, /* 30 29 30 29 30 29 30 30 29 30 30 29 355 1998 */ { 05, 01, 28, 0b1001001101101000 }, /* 30 29 29 30 29 29 30 30 29 30 30 29 30 384 1999 */ { 00, 02, 16, 0b1001001011100000 }, /* 30 29 29 30 29 29 30 29 30 30 30 29 354 2000 */ { 00, 02, 05, 0b1100100101100000 }, /* 30 30 29 29 30 29 29 30 29 30 30 29 354 2001 */ { 04, 01, 24, 0b1101010010101000 }, /* 30 30 29 30 29 30 29 29 30 29 30 29 30 384 2002 */ { 00, 02, 12, 0b1101010010100000 }, /* 30 30 29 30 29 30 29 29 30 29 30 29 354 2003 */ { 00, 02, 01, 0b1101101001010000 }, /* 30 30 29 30 30 29 30 29 29 30 29 30 355 2004 */ { 02, 01, 22, 0b0101101010101000 }, /* 29 30 29 30 30 29 30 29 30 29 30 29 30 384 2005 */ { 00, 02, 09, 0b0101011010100000 }, /* 29 30 29 30 29 30 30 29 30 29 30 29 354 2006 */ { 07, 01, 29, 0b1010101011011000 }, /* 30 29 30 29 30 29 30 29 30 30 29 30 30 385 2007 */ { 00, 02, 18, 0b0010010111010000 }, /* 29 29 30 29 29 30 29 30 30 30 29 30 354 2008 */ { 00, 02, 07, 0b1001001011010000 }, /* 30 29 29 30 29 29 30 29 30 30 29 30 354 2009 */ { 05, 01, 26, 0b1100100101011000 }, /* 30 30 29 29 30 29 29 30 29 30 29 30 30 384 2010 */ { 00, 02, 14, 0b1010100101010000 }, /* 30 29 30 29 30 29 29 30 29 30 29 30 354 2011 */ { 00, 02, 03, 0b1011010010100000 }, /* 30 29 30 30 29 30 29 29 30 29 30 29 354 2012 */ { 04, 01, 23, 0b1011010101010000 }, /* 30 29 30 30 29 30 29 30 29 30 29 30 29 384 2013 */ { 00, 02, 10, 0b1010110101010000 }, /* 30 29 30 29 30 30 29 30 29 30 29 30 355 2014 */ { 09, 01, 31, 0b0101010110101000 }, /* 29 30 29 30 29 30 29 30 30 29 30 29 30 384 2015 */ { 00, 02, 19, 0b0100101110100000 }, /* 29 30 29 29 30 29 30 30 30 29 30 29 354 2016 */ { 00, 02, 08, 0b1010010110110000 }, /* 30 29 30 29 29 30 29 30 30 29 30 30 355 2017 */ { 06, 01, 28, 0b0101001010111000 }, /* 29 30 29 30 29 29 30 29 30 29 30 30 30 384 2018 */ { 00, 02, 16, 0b0101001010110000 }, /* 29 30 29 30 29 29 30 29 30 29 30 30 354 2019 */ { 00, 02, 05, 0b1010100100110000 }, /* 30 29 30 29 30 29 29 30 29 29 30 30 354 2020 */ { 04, 01, 25, 0b0111010010101000 }, /* 29 30 
30 30 29 30 29 29 30 29 30 29 30 384 2021 */ { 00, 02, 12, 0b0110101010100000 }, /* 29 30 30 29 30 29 30 29 30 29 30 29 354 2022 */ { 00, 02, 01, 0b1010110101010000 }, /* 30 29 30 29 30 30 29 30 29 30 29 30 355 2023 */ { 02, 01, 22, 0b0100110110101000 }, /* 29 30 29 29 30 30 29 30 30 29 30 29 30 384 2024 */ { 00, 02, 10, 0b0100101101100000 }, /* 29 30 29 29 30 29 30 30 29 30 30 29 354 2025 */ { 06, 01, 29, 0b1010010101110000 }, /* 30 29 30 29 29 30 29 30 29 30 30 30 29 384 2026 */ { 00, 02, 17, 0b1010010011100000 }, /* 30 29 30 29 29 30 29 29 30 30 30 29 354 2027 */ { 00, 02, 06, 0b1101001001100000 }, /* 30 30 29 30 29 29 30 29 29 30 30 29 354 2028 */ { 05, 01, 26, 0b1110100100110000 }, /* 30 30 30 29 30 29 29 30 29 29 30 30 29 384 2029 */ { 00, 02, 13, 0b1101010100110000 }, /* 30 30 29 30 29 30 29 30 29 29 30 30 355 2030 */ { 00, 02, 03, 0b0101101010100000 }, /* 29 30 29 30 30 29 30 29 30 29 30 29 354 2031 */ { 03, 01, 23, 0b0110101101010000 }, /* 29 30 30 29 30 29 30 30 29 30 29 30 29 384 2032 */ { 00, 02, 11, 0b1001011011010000 }, /* 30 29 29 30 29 30 30 29 30 30 29 30 355 2033 */ { 11, 01, 31, 0b0100101011101000 }, /* 29 30 29 29 30 29 30 29 30 30 30 29 30 384 2034 */ { 00, 02, 19, 0b0100101011010000 }, /* 29 30 29 29 30 29 30 29 30 30 29 30 354 2035 */ { 00, 02, 08, 0b1010010011010000 }, /* 30 29 30 29 29 30 29 29 30 30 29 30 354 2036 */ { 06, 01, 28, 0b1101001001011000 }, /* 30 30 29 30 29 29 30 29 29 30 29 30 30 384 2037 */ { 00, 02, 15, 0b1101001001010000 }, /* 30 30 29 30 29 29 30 29 29 30 29 30 354 2038 */ { 00, 02, 04, 0b1101010100100000 }, /* 30 30 29 30 29 30 29 30 29 29 30 29 354 2039 */ { 05, 01, 24, 0b1101101010100000 }, /* 30 30 29 30 30 29 30 29 30 29 30 29 29 384 2040 */ { 00, 02, 12, 0b1011010110100000 }, /* 30 29 30 30 29 30 29 30 30 29 30 29 355 2041 */ { 00, 02, 01, 0b0101011011010000 }, /* 29 30 29 30 29 30 30 29 30 30 29 30 355 2042 */ { 02, 01, 22, 0b0100101011011000 }, /* 29 30 29 29 30 29 30 29 30 30 29 30 30 384 2043 */ { 00, 02, 10, 0b0100100110110000 }, /* 29 30 29 29 30 29 29 30 30 29 30 30 354 2044 */ { 07, 01, 30, 0b1010010010111000 }, /* 30 29 30 29 29 30 29 29 30 29 30 30 30 384 2045 */ { 00, 02, 17, 0b1010010010110000 }, /* 30 29 30 29 29 30 29 29 30 29 30 30 354 2046 */ { 00, 02, 06, 0b1010101001010000 }, /* 30 29 30 29 30 29 30 29 29 30 29 30 354 2047 */ { 05, 01, 26, 0b1011010100101000 }, /* 30 29 30 30 29 30 29 30 29 29 30 29 30 384 2048 */ { 00, 02, 14, 0b0110110100100000 }, /* 29 30 30 29 30 30 29 30 29 29 30 29 354 2049 */ { 00, 02, 02, 0b1010110110100000 }, /* 30 29 30 29 30 30 29 30 30 29 30 29 355 2050 */ { 03, 01, 23, 0b0101010110110000 }, /* 29 30 29 30 29 30 29 30 30 29 30 30 29 384 2051 */ { 00, 02, 11, 0b1001001101110000 }, /* 30 29 29 30 29 29 30 30 29 30 30 30 355 2052 */ { 08, 02, 01, 0b0100100101111000 }, /* 29 30 29 29 30 29 29 30 29 30 30 30 30 384 2053 */ { 00, 02, 19, 0b0100100101110000 }, /* 29 30 29 29 30 29 29 30 29 30 30 30 354 2054 */ { 00, 02, 08, 0b0110010010110000 }, /* 29 30 30 29 29 30 29 29 30 29 30 30 354 2055 */ { 06, 01, 28, 0b0110101001010000 }, /* 29 30 30 29 30 29 30 29 29 30 29 30 29 383 2056 */ { 00, 02, 15, 0b1110101001010000 }, /* 30 30 30 29 30 29 30 29 29 30 29 30 355 2057 */ { 00, 02, 04, 0b0110101010100000 }, /* 29 30 30 29 30 29 30 29 30 29 30 29 354 2058 */ { 04, 01, 24, 0b1010101101100000 }, /* 30 29 30 29 30 29 30 30 29 30 30 29 29 384 2059 */ { 00, 02, 12, 0b1010101011100000 }, /* 30 29 30 29 30 29 30 29 30 30 30 29 355 2060 */ { 00, 02, 02, 0b1001001011100000 }, /* 30 29 29 30 29 29 30 29 30 30 30 29 
354 2061 */ { 03, 01, 21, 0b1100100101110000 }, /* 30 30 29 29 30 29 29 30 29 30 30 30 29 384 2062 */ { 00, 02, 09, 0b1100100101100000 }, /* 30 30 29 29 30 29 29 30 29 30 30 29 354 2063 */ { 07, 01, 29, 0b1101010010101000 }, /* 30 30 29 30 29 30 29 29 30 29 30 29 30 384 2064 */ { 00, 02, 17, 0b1101010010100000 }, /* 30 30 29 30 29 30 29 29 30 29 30 29 354 2065 */ { 00, 02, 05, 0b1101101001010000 }, /* 30 30 29 30 30 29 30 29 29 30 29 30 355 2066 */ { 05, 01, 26, 0b0101101010101000 }, /* 29 30 29 30 30 29 30 29 30 29 30 29 30 384 2067 */ { 00, 02, 14, 0b0101011010100000 }, /* 29 30 29 30 29 30 30 29 30 29 30 29 354 2068 */ { 00, 02, 03, 0b1010011011010000 }, /* 30 29 30 29 29 30 30 29 30 30 29 30 355 2069 */ { 04, 01, 23, 0b0101001011101000 }, /* 29 30 29 30 29 29 30 29 30 30 30 29 30 384 2070 */ { 00, 02, 11, 0b0101001011010000 }, /* 29 30 29 30 29 29 30 29 30 30 29 30 354 2071 */ { 08, 01, 31, 0b1010100101011000 }, /* 30 29 30 29 30 29 29 30 29 30 29 30 30 384 2072 */ { 00, 02, 19, 0b1010100101010000 }, /* 30 29 30 29 30 29 29 30 29 30 29 30 354 2073 */ { 00, 02, 07, 0b1011010010100000 }, /* 30 29 30 30 29 30 29 29 30 29 30 29 354 2074 */ { 06, 01, 27, 0b1011010101010000 }, /* 30 29 30 30 29 30 29 30 29 30 29 30 29 384 2075 */ { 00, 02, 15, 0b1010110101010000 }, /* 30 29 30 29 30 30 29 30 29 30 29 30 355 2076 */ { 00, 02, 05, 0b0101010110100000 }, /* 29 30 29 30 29 30 29 30 30 29 30 29 354 2077 */ { 04, 01, 24, 0b1010010111010000 }, /* 30 29 30 29 29 30 29 30 30 30 29 30 29 384 2078 */ { 00, 02, 12, 0b1010010110110000 }, /* 30 29 30 29 29 30 29 30 30 29 30 30 355 2079 */ { 00, 02, 02, 0b0101001010110000 }, /* 29 30 29 30 29 29 30 29 30 29 30 30 354 2080 */ { 03, 01, 22, 0b1010100100111000 }, /* 30 29 30 29 30 29 29 30 29 29 30 30 30 384 2081 */ { 00, 02, 09, 0b0110100100110000 }, /* 29 30 30 29 30 29 29 30 29 29 30 30 354 2082 */ { 07, 01, 29, 0b0111001010011000 }, /* 29 30 30 30 29 29 30 29 30 29 29 30 30 384 2083 */ { 00, 02, 17, 0b0110101010100000 }, /* 29 30 30 29 30 29 30 29 30 29 30 29 354 2084 */ { 00, 02, 06, 0b1010110101010000 }, /* 30 29 30 29 30 30 29 30 29 30 29 30 355 2085 */ { 05, 01, 26, 0b0100110110101000 }, /* 29 30 29 29 30 30 29 30 30 29 30 29 30 384 2086 */ { 00, 02, 14, 0b0100101101100000 }, /* 29 30 29 29 30 29 30 30 29 30 30 29 354 2087 */ { 00, 02, 03, 0b1010010101110000 }, /* 30 29 30 29 29 30 29 30 29 30 30 30 355 2088 */ { 04, 01, 24, 0b0101001001110000 }, /* 29 30 29 30 29 29 30 29 29 30 30 30 29 383 2089 */ { 00, 02, 10, 0b1101000101100000 }, /* 30 30 29 30 29 29 29 30 29 30 30 29 354 2090 */ { 08, 01, 30, 0b1110100100110000 }, /* 30 30 30 29 30 29 29 30 29 29 30 30 29 384 2091 */ { 00, 02, 18, 0b1101010100100000 }, /* 30 30 29 30 29 30 29 30 29 29 30 29 354 2092 */ { 00, 02, 07, 0b1101101010100000 }, /* 30 30 29 30 30 29 30 29 30 29 30 29 355 2093 */ { 06, 01, 27, 0b0110101101010000 }, /* 29 30 30 29 30 29 30 30 29 30 29 30 29 384 2094 */ { 00, 02, 15, 0b0101011011010000 }, /* 29 30 29 30 29 30 30 29 30 30 29 30 355 2095 */ { 00, 02, 05, 0b0100101011100000 }, /* 29 30 29 29 30 29 30 29 30 30 30 29 354 2096 */ { 04, 01, 25, 0b1010010011101000 }, /* 30 29 30 29 29 30 29 29 30 30 30 29 30 384 2097 */ { 00, 02, 12, 0b1010001011010000 }, /* 30 29 30 29 29 29 30 29 30 30 29 30 354 2098 */ { 00, 02, 01, 0b1101000101010000 }, /* 30 30 29 30 29 29 29 30 29 30 29 30 354 2099 */ { 02, 01, 21, 0b1101100100101000 }, /* 30 30 29 30 30 29 29 30 29 29 30 29 30 384 2100 */ { 00, 02, 09, 0b1101010100100000 }, /* 30 30 29 30 29 30 29 30 29 29 30 29 354 */ }; internal override 
int MinCalendarYear => MinLunisolarYear; internal override int MaxCalendarYear => MaxLunisolarYear; internal override DateTime MinDate => s_minDate; internal override DateTime MaxDate => s_maxDate; internal override EraInfo[]? CalEraInfo => null; internal override int GetYearInfo(int lunarYear, int index) { if (lunarYear < MinLunisolarYear || lunarYear > MaxLunisolarYear) { throw new ArgumentOutOfRangeException("year", lunarYear, SR.Format(SR.ArgumentOutOfRange_Range, MinLunisolarYear, MaxLunisolarYear)); } return s_yinfo[lunarYear - MinLunisolarYear, index]; } internal override int GetYear(int year, DateTime time) { return year; } internal override int GetGregorianYear(int year, int era) { if (era != CurrentEra && era != ChineseEra) { throw new ArgumentOutOfRangeException(nameof(era), era, SR.ArgumentOutOfRange_InvalidEraValue); } if (year < MinLunisolarYear || year > MaxLunisolarYear) { throw new ArgumentOutOfRangeException(nameof(year), year, SR.Format(SR.ArgumentOutOfRange_Range, MinLunisolarYear, MaxLunisolarYear)); } return year; } public ChineseLunisolarCalendar() { } public override int GetEra(DateTime time) { CheckTicksRange(time.Ticks); return ChineseEra; } internal override CalendarId ID => CalendarId.CHINESELUNISOLAR; internal override CalendarId BaseCalendarID { get { //Use CAL_GREGORIAN just to get CurrentEraValue as 1 since we do not have data under the ID CAL_ChineseLunisolar yet return CalendarId.GREGORIAN; } } public override int[] Eras => new int[] { ChineseEra }; } }
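// ---------------------------------------------------------------------------
// Editor's sketch (not part of the BCL source above). Each row of s_yinfo
// holds the leap-month index (0 = no leap month), the Gregorian month and day
// on which that lunar year begins, and a 16-bit mask whose leading 12 bits
// (13 in leap years) record 30-day (bit set) versus 29-day (bit clear) lunar
// months, as the column header and per-row comments indicate. The decoder
// below makes that layout explicit; the namespace and identifiers are
// hypothetical, and the sample row is copied from the 1901 entry.
// ---------------------------------------------------------------------------
namespace System.Globalization.EditorSketches
{
    internal static class YinfoLayoutSketch
    {
        internal static int[] DecodeMonthLengths(int leapMonth, int monthMask)
        {
            int monthCount = leapMonth == 0 ? 12 : 13;
            var lengths = new int[monthCount];
            for (int i = 0; i < monthCount; i++)
            {
                // Bit 15 describes lunar month 1, bit 14 month 2, and so on;
                // the trailing bits of the mask are padding.
                lengths[i] = ((monthMask >> (15 - i)) & 1) == 1 ? 30 : 29;
            }
            return lengths;
        }

        internal static void Demo()
        {
            // 1901: no leap month, lunar new year on 19 February,
            // month-length mask 0b0100101011100000.
            int[] lengths = DecodeMonthLengths(0, 0b0100101011100000);
            // lengths => 29 30 29 29 30 29 30 29 30 30 30 29 (354 days total),
            // matching the inline comment on the 1901 row above.
        }
    }
}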
/* Generated SBE (Simple Binary Encoding) message codec */ #pragma warning disable 1591 // disable warning on missing comments using System; using Adaptive.SimpleBinaryEncoding; namespace Adaptive.SimpleBinaryEncoding.PerfTests.Bench.SBE.FIX { public sealed partial class OrderCancelRequest { public const ushort BlockLength = (ushort)119; public const ushort TemplateId = (ushort)70; public const ushort SchemaId = (ushort)2; public const ushort Schema_Version = (ushort)0; public const string SematicType = "F"; private readonly OrderCancelRequest _parentMessage; private DirectBuffer _buffer; private int _offset; private int _limit; private int _actingBlockLength; private int _actingVersion; public int Offset { get { return _offset; } } public OrderCancelRequest() { _parentMessage = this; } public void WrapForEncode(DirectBuffer buffer, int offset) { _buffer = buffer; _offset = offset; _actingBlockLength = BlockLength; _actingVersion = Schema_Version; Limit = offset + _actingBlockLength; } public void WrapForDecode(DirectBuffer buffer, int offset, int actingBlockLength, int actingVersion) { _buffer = buffer; _offset = offset; _actingBlockLength = actingBlockLength; _actingVersion = actingVersion; Limit = offset + _actingBlockLength; } public int Size { get { return _limit - _offset; } } public int Limit { get { return _limit; } set { _buffer.CheckLimit(_limit); _limit = value; } } public const int AccountId = 1; public static string AccountMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "String"; } return ""; } public const byte AccountNullValue = (byte)0; public const byte AccountMinValue = (byte)32; public const byte AccountMaxValue = (byte)126; public const int AccountLength = 12; public byte GetAccount(int index) { if (index < 0 || index >= 12) { throw new IndexOutOfRangeException("index out of range: index=" + index); } return _buffer.CharGet(_offset + 0 + (index * 1)); } public void SetAccount(int index, byte value) { if (index < 0 || index >= 12) { throw new IndexOutOfRangeException("index out of range: index=" + index); } _buffer.CharPut(_offset + 0 + (index * 1), value); } public const string AccountCharacterEncoding = "UTF-8"; public int GetAccount(byte[] dst, int dstOffset) { const int length = 12; if (dstOffset < 0 || dstOffset > (dst.Length - length)) { throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset); } _buffer.GetBytes(_offset + 0, dst, dstOffset, length); return length; } public void SetAccount(byte[] src, int srcOffset) { const int length = 12; if (srcOffset < 0 || srcOffset > (src.Length - length)) { throw new IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset); } _buffer.SetBytes(_offset + 0, src, srcOffset, length); } public const int ClOrdIDId = 11; public static string ClOrdIDMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "String"; } return ""; } public const byte ClOrdIDNullValue = (byte)0; public const byte ClOrdIDMinValue = (byte)32; public const byte ClOrdIDMaxValue = (byte)126; public const int ClOrdIDLength = 20; public byte GetClOrdID(int index) { if (index < 0 || index >= 20) { throw new IndexOutOfRangeException("index out of range: index=" + index); } return _buffer.CharGet(_offset + 12 + 
(index * 1)); } public void SetClOrdID(int index, byte value) { if (index < 0 || index >= 20) { throw new IndexOutOfRangeException("index out of range: index=" + index); } _buffer.CharPut(_offset + 12 + (index * 1), value); } public const string ClOrdIDCharacterEncoding = "UTF-8"; public int GetClOrdID(byte[] dst, int dstOffset) { const int length = 20; if (dstOffset < 0 || dstOffset > (dst.Length - length)) { throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset); } _buffer.GetBytes(_offset + 12, dst, dstOffset, length); return length; } public void SetClOrdID(byte[] src, int srcOffset) { const int length = 20; if (srcOffset < 0 || srcOffset > (src.Length - length)) { throw new IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset); } _buffer.SetBytes(_offset + 12, src, srcOffset, length); } public const int OrderIDId = 37; public static string OrderIDMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "int"; } return ""; } public const long OrderIDNullValue = -9223372036854775808L; public const long OrderIDMinValue = -9223372036854775807L; public const long OrderIDMaxValue = 9223372036854775807L; public long OrderID { get { return _buffer.Int64GetLittleEndian(_offset + 32); } set { _buffer.Int64PutLittleEndian(_offset + 32, value); } } public const int OrigClOrdIDId = 41; public static string OrigClOrdIDMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "String"; } return ""; } public const byte OrigClOrdIDNullValue = (byte)0; public const byte OrigClOrdIDMinValue = (byte)32; public const byte OrigClOrdIDMaxValue = (byte)126; public const int OrigClOrdIDLength = 20; public byte GetOrigClOrdID(int index) { if (index < 0 || index >= 20) { throw new IndexOutOfRangeException("index out of range: index=" + index); } return _buffer.CharGet(_offset + 40 + (index * 1)); } public void SetOrigClOrdID(int index, byte value) { if (index < 0 || index >= 20) { throw new IndexOutOfRangeException("index out of range: index=" + index); } _buffer.CharPut(_offset + 40 + (index * 1), value); } public const string OrigClOrdIDCharacterEncoding = "UTF-8"; public int GetOrigClOrdID(byte[] dst, int dstOffset) { const int length = 20; if (dstOffset < 0 || dstOffset > (dst.Length - length)) { throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset); } _buffer.GetBytes(_offset + 40, dst, dstOffset, length); return length; } public void SetOrigClOrdID(byte[] src, int srcOffset) { const int length = 20; if (srcOffset < 0 || srcOffset > (src.Length - length)) { throw new IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset); } _buffer.SetBytes(_offset + 40, src, srcOffset, length); } public const int SideId = 54; public static string SideMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "char"; } return ""; } public Side Side { get { return (Side)_buffer.CharGet(_offset + 60); } set { _buffer.CharPut(_offset + 60, (byte)value); } } public const int SymbolId = 55; public static string SymbolMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) 
{ case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "String"; } return ""; } public const byte SymbolNullValue = (byte)0; public const byte SymbolMinValue = (byte)32; public const byte SymbolMaxValue = (byte)126; public const int SymbolLength = 6; public byte GetSymbol(int index) { if (index < 0 || index >= 6) { throw new IndexOutOfRangeException("index out of range: index=" + index); } return _buffer.CharGet(_offset + 61 + (index * 1)); } public void SetSymbol(int index, byte value) { if (index < 0 || index >= 6) { throw new IndexOutOfRangeException("index out of range: index=" + index); } _buffer.CharPut(_offset + 61 + (index * 1), value); } public const string SymbolCharacterEncoding = "UTF-8"; public int GetSymbol(byte[] dst, int dstOffset) { const int length = 6; if (dstOffset < 0 || dstOffset > (dst.Length - length)) { throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset); } _buffer.GetBytes(_offset + 61, dst, dstOffset, length); return length; } public void SetSymbol(byte[] src, int srcOffset) { const int length = 6; if (srcOffset < 0 || srcOffset > (src.Length - length)) { throw new IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset); } _buffer.SetBytes(_offset + 61, src, srcOffset, length); } public const int TransactTimeId = 60; public static string TransactTimeMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "UTCTimestamp"; } return ""; } public const ulong TransactTimeNullValue = 0x8000000000000000UL; public const ulong TransactTimeMinValue = 0x0UL; public const ulong TransactTimeMaxValue = 0x7fffffffffffffffUL; public ulong TransactTime { get { return _buffer.Uint64GetLittleEndian(_offset + 67); } set { _buffer.Uint64PutLittleEndian(_offset + 67, value); } } public const int ManualOrderIndicatorId = 1028; public static string ManualOrderIndicatorMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return ""; } return ""; } public BooleanType ManualOrderIndicator { get { return (BooleanType)_buffer.Uint8Get(_offset + 75); } set { _buffer.Uint8Put(_offset + 75, (byte)value); } } public const int SecurityDescId = 107; public static string SecurityDescMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "String"; } return ""; } public const byte SecurityDescNullValue = (byte)0; public const byte SecurityDescMinValue = (byte)32; public const byte SecurityDescMaxValue = (byte)126; public const int SecurityDescLength = 20; public byte GetSecurityDesc(int index) { if (index < 0 || index >= 20) { throw new IndexOutOfRangeException("index out of range: index=" + index); } return _buffer.CharGet(_offset + 76 + (index * 1)); } public void SetSecurityDesc(int index, byte value) { if (index < 0 || index >= 20) { throw new IndexOutOfRangeException("index out of range: index=" + index); } _buffer.CharPut(_offset + 76 + (index * 1), value); } public const string SecurityDescCharacterEncoding = "UTF-8"; public int GetSecurityDesc(byte[] dst, int dstOffset) { const int length = 20; if (dstOffset < 0 || dstOffset > 
(dst.Length - length)) { throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset); } _buffer.GetBytes(_offset + 76, dst, dstOffset, length); return length; } public void SetSecurityDesc(byte[] src, int srcOffset) { const int length = 20; if (srcOffset < 0 || srcOffset > (src.Length - length)) { throw new IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset); } _buffer.SetBytes(_offset + 76, src, srcOffset, length); } public const int SecurityTypeId = 167; public static string SecurityTypeMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "String"; } return ""; } public const byte SecurityTypeNullValue = (byte)0; public const byte SecurityTypeMinValue = (byte)32; public const byte SecurityTypeMaxValue = (byte)126; public const int SecurityTypeLength = 3; public byte GetSecurityType(int index) { if (index < 0 || index >= 3) { throw new IndexOutOfRangeException("index out of range: index=" + index); } return _buffer.CharGet(_offset + 96 + (index * 1)); } public void SetSecurityType(int index, byte value) { if (index < 0 || index >= 3) { throw new IndexOutOfRangeException("index out of range: index=" + index); } _buffer.CharPut(_offset + 96 + (index * 1), value); } public const string SecurityTypeCharacterEncoding = "UTF-8"; public int GetSecurityType(byte[] dst, int dstOffset) { const int length = 3; if (dstOffset < 0 || dstOffset > (dst.Length - length)) { throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset); } _buffer.GetBytes(_offset + 96, dst, dstOffset, length); return length; } public void SetSecurityType(byte[] src, int srcOffset) { const int length = 3; if (srcOffset < 0 || srcOffset > (src.Length - length)) { throw new IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset); } _buffer.SetBytes(_offset + 96, src, srcOffset, length); } public const int CorrelationClOrdIDId = 9717; public static string CorrelationClOrdIDMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "String"; } return ""; } public const byte CorrelationClOrdIDNullValue = (byte)0; public const byte CorrelationClOrdIDMinValue = (byte)32; public const byte CorrelationClOrdIDMaxValue = (byte)126; public const int CorrelationClOrdIDLength = 20; public byte GetCorrelationClOrdID(int index) { if (index < 0 || index >= 20) { throw new IndexOutOfRangeException("index out of range: index=" + index); } return _buffer.CharGet(_offset + 99 + (index * 1)); } public void SetCorrelationClOrdID(int index, byte value) { if (index < 0 || index >= 20) { throw new IndexOutOfRangeException("index out of range: index=" + index); } _buffer.CharPut(_offset + 99 + (index * 1), value); } public const string CorrelationClOrdIDCharacterEncoding = "UTF-8"; public int GetCorrelationClOrdID(byte[] dst, int dstOffset) { const int length = 20; if (dstOffset < 0 || dstOffset > (dst.Length - length)) { throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset); } _buffer.GetBytes(_offset + 99, dst, dstOffset, length); return length; } public void SetCorrelationClOrdID(byte[] src, int srcOffset) { const int length = 20; if (srcOffset < 0 || srcOffset > (src.Length - length)) { throw new 
IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset); } _buffer.SetBytes(_offset + 99, src, srcOffset, length); } } }
// ==++== // // Copyright (c) Microsoft Corporation. All rights reserved. // // ==--== /*============================================================================= ** ** Class: AppDomainSetup ** ** <OWNER>blanders</OWNER> ** ** Purpose: Defines the settings that the loader uses to find assemblies in an ** AppDomain ** ** Date: Dec 22, 2000 ** =============================================================================*/ namespace System { using System; #if FEATURE_CLICKONCE #if !FEATURE_PAL using System.Deployment.Internal.Isolation; using System.Deployment.Internal.Isolation.Manifest; using System.Runtime.Hosting; #endif #endif using System.Runtime.CompilerServices; using System.Runtime; using System.Text; using System.Threading; using System.Runtime.InteropServices; using System.Runtime.Serialization; using System.Reflection; using System.Security; using System.Security.Permissions; using System.Security.Policy; using System.Globalization; using Path = System.IO.Path; using System.Runtime.Versioning; using System.Diagnostics.Contracts; using System.Collections; using System.Collections.Generic; [Serializable] [ClassInterface(ClassInterfaceType.None)] [System.Runtime.InteropServices.ComVisible(true)] public sealed class AppDomainSetup : IAppDomainSetup { [Serializable] internal enum LoaderInformation { // If you add a new value, add the corresponding property // to AppDomain.GetData() and SetData()'s switch statements. ApplicationBaseValue = LOADER_APPLICATION_BASE, ConfigurationFileValue = LOADER_CONFIGURATION_BASE, DynamicBaseValue = LOADER_DYNAMIC_BASE, DevPathValue = LOADER_DEVPATH, ApplicationNameValue = LOADER_APPLICATION_NAME, PrivateBinPathValue = LOADER_PRIVATE_PATH, PrivateBinPathProbeValue = LOADER_PRIVATE_BIN_PATH_PROBE, ShadowCopyDirectoriesValue = LOADER_SHADOW_COPY_DIRECTORIES, ShadowCopyFilesValue = LOADER_SHADOW_COPY_FILES, CachePathValue = LOADER_CACHE_PATH, LicenseFileValue = LOADER_LICENSE_FILE, DisallowPublisherPolicyValue = LOADER_DISALLOW_PUBLISHER_POLICY, DisallowCodeDownloadValue = LOADER_DISALLOW_CODE_DOWNLOAD, DisallowBindingRedirectsValue = LOADER_DISALLOW_BINDING_REDIRECTS, DisallowAppBaseProbingValue = LOADER_DISALLOW_APPBASE_PROBING, ConfigurationBytesValue = LOADER_CONFIGURATION_BYTES, LoaderMaximum = LOADER_MAXIMUM, } // This class has an unmanaged representation so be aware you will need to make edits in vm\object.h if you change the order // of these fields or add new ones. private string[] _Entries; private LoaderOptimization _LoaderOptimization; #pragma warning disable 169 private String _AppBase; // for compat with v1.1 #pragma warning restore 169 [OptionalField(VersionAdded = 2)] private AppDomainInitializer _AppDomainInitializer; [OptionalField(VersionAdded = 2)] private string[] _AppDomainInitializerArguments; #if FEATURE_CLICKONCE [OptionalField(VersionAdded = 2)] private ActivationArguments _ActivationArguments; #endif #if FEATURE_CORECLR // On the CoreCLR, this contains just the name of the permission set that we install in the new appdomain. // Not the ToXml().ToString() of an ApplicationTrust object. 
#endif [OptionalField(VersionAdded = 2)] private string _ApplicationTrust; [OptionalField(VersionAdded = 2)] private byte[] _ConfigurationBytes; #if FEATURE_COMINTEROP [OptionalField(VersionAdded = 3)] private bool _DisableInterfaceCache = false; #endif // FEATURE_COMINTEROP [OptionalField(VersionAdded = 4)] private string _AppDomainManagerAssembly; [OptionalField(VersionAdded = 4)] private string _AppDomainManagerType; #if FEATURE_APTCA [OptionalField(VersionAdded = 4)] private string[] _AptcaVisibleAssemblies; #endif // A collection of strings used to indicate which breaking changes shouldn't be applied // to an AppDomain. We only use the keys, the values are ignored. [OptionalField(VersionAdded = 4)] private Dictionary<string, object> _CompatFlags; [OptionalField(VersionAdded = 5)] // This was added in .NET FX v4.5 private String _TargetFrameworkName; #if !FEATURE_CORECLR [NonSerialized] internal AppDomainSortingSetupInfo _AppDomainSortingSetupInfo; #endif [OptionalField(VersionAdded = 5)] // This was added in .NET FX v4.5 private bool _CheckedForTargetFrameworkName; #if FEATURE_RANDOMIZED_STRING_HASHING [OptionalField(VersionAdded = 5)] // This was added in .NET FX v4.5 private bool _UseRandomizedStringHashing; #endif [SecuritySafeCritical] internal AppDomainSetup(AppDomainSetup copy, bool copyDomainBoundData) { string[] mine = Value; if(copy != null) { string[] other = copy.Value; int mineSize = _Entries.Length; int otherSize = other.Length; int size = (otherSize < mineSize) ? otherSize : mineSize; for (int i = 0; i < size; i++) mine[i] = other[i]; if (size < mineSize) { // This case can happen when the copy is a deserialized version of // an AppDomainSetup object serialized by Everett. for (int i = size; i < mineSize; i++) mine[i] = null; } _LoaderOptimization = copy._LoaderOptimization; _AppDomainInitializerArguments = copy.AppDomainInitializerArguments; #if FEATURE_CLICKONCE _ActivationArguments = copy.ActivationArguments; #endif _ApplicationTrust = copy._ApplicationTrust; if (copyDomainBoundData) _AppDomainInitializer = copy.AppDomainInitializer; else _AppDomainInitializer = null; _ConfigurationBytes = copy.GetConfigurationBytes(); #if FEATURE_COMINTEROP _DisableInterfaceCache = copy._DisableInterfaceCache; #endif // FEATURE_COMINTEROP _AppDomainManagerAssembly = copy.AppDomainManagerAssembly; _AppDomainManagerType = copy.AppDomainManagerType; #if FEATURE_APTCA _AptcaVisibleAssemblies = copy.PartialTrustVisibleAssemblies; #endif if (copy._CompatFlags != null) { SetCompatibilitySwitches(copy._CompatFlags.Keys); } #if !FEATURE_CORECLR if(copy._AppDomainSortingSetupInfo != null) { _AppDomainSortingSetupInfo = new AppDomainSortingSetupInfo(copy._AppDomainSortingSetupInfo); } #endif _TargetFrameworkName = copy._TargetFrameworkName; #if FEATURE_RANDOMIZED_STRING_HASHING _UseRandomizedStringHashing = copy._UseRandomizedStringHashing; #endif } else _LoaderOptimization = LoaderOptimization.NotSpecified; } public AppDomainSetup() { _LoaderOptimization = LoaderOptimization.NotSpecified; } #if FEATURE_CLICKONCE // Creates an AppDomainSetup object from an application identity. 
[ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] public AppDomainSetup (ActivationContext activationContext) : this (new ActivationArguments(activationContext)) {} [System.Security.SecuritySafeCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] public AppDomainSetup (ActivationArguments activationArguments) { if (activationArguments == null) throw new ArgumentNullException("activationArguments"); Contract.EndContractBlock(); _LoaderOptimization = LoaderOptimization.NotSpecified; ActivationArguments = activationArguments; Contract.Assert(activationArguments.ActivationContext != null, "Cannot set base directory without activation context"); string entryPointPath = CmsUtils.GetEntryPointFullPath(activationArguments); if (!String.IsNullOrEmpty(entryPointPath)) SetupDefaults(entryPointPath); else ApplicationBase = activationArguments.ActivationContext.ApplicationDirectory; } #endif // !FEATURE_CLICKONCE #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] internal void SetupDefaults(string imageLocation, bool imageLocationAlreadyNormalized = false) { char[] sep = {'\\', '/'}; int i = imageLocation.LastIndexOfAny(sep); if (i == -1) { ApplicationName = imageLocation; } else { ApplicationName = imageLocation.Substring(i+1); string appBase = imageLocation.Substring(0, i+1); if (imageLocationAlreadyNormalized) Value[(int) LoaderInformation.ApplicationBaseValue] = appBase; else ApplicationBase = appBase; } ConfigurationFile = ApplicationName + AppDomainSetup.ConfigurationExtension; } internal string[] Value { get { if( _Entries == null) _Entries = new String[LOADER_MAXIMUM]; return _Entries; } } internal String GetUnsecureApplicationBase() { return Value[(int) LoaderInformation.ApplicationBaseValue]; } public string AppDomainManagerAssembly { get { return _AppDomainManagerAssembly; } set { _AppDomainManagerAssembly = value; } } public string AppDomainManagerType { get { return _AppDomainManagerType; } set { _AppDomainManagerType = value; } } #if FEATURE_APTCA public string[] PartialTrustVisibleAssemblies { get { return _AptcaVisibleAssemblies; } set { if (value != null) { _AptcaVisibleAssemblies = (string[])value.Clone(); Array.Sort<string>(_AptcaVisibleAssemblies, StringComparer.OrdinalIgnoreCase); } else { _AptcaVisibleAssemblies = null; } } } #endif public String ApplicationBase { #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #else [System.Security.SecuritySafeCritical] #endif [Pure] [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] get { return VerifyDir(GetUnsecureApplicationBase(), false); } #if FEATURE_CORECLR [System.Security.SecurityCritical] // auto-generated #endif [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] set { Value[(int) LoaderInformation.ApplicationBaseValue] = NormalizePath(value, false); } } [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] private String NormalizePath(String path, bool useAppBase) { if(path == null) return null; // If we add very long file name support ("\\?\") to the Path class then this is unnecesary, // but we do not plan on doing this for now. 
if (!useAppBase) path = System.Security.Util.URLString.PreProcessForExtendedPathRemoval(path, false); int len = path.Length; if (len == 0) return null; bool UNCpath = false; if ((len > 7) && (String.Compare( path, 0, "file:", 0, 5, StringComparison.OrdinalIgnoreCase) == 0)) { int trim; if (path[6] == '\\') { if ((path[7] == '\\') || (path[7] == '/')) { // Don't allow "file:\\\\", because we can't tell the difference // with it for "file:\\" + "\\server" and "file:\\\" + "\localpath" if ( (len > 8) && ((path[8] == '\\') || (path[8] == '/')) ) throw new ArgumentException(Environment.GetResourceString("Argument_InvalidPathChars")); // file:\\\ means local path else trim = 8; } // file:\\ means remote server else { trim = 5; UNCpath = true; } } // local path else if (path[7] == '/') trim = 8; // remote else { // file://\\remote if ( (len > 8) && (path[7] == '\\') && (path[8] == '\\') ) trim = 7; else { // file://remote trim = 5; // Create valid UNC path by changing // all occurences of '/' to '\\' in path System.Text.StringBuilder winPathBuilder = new System.Text.StringBuilder(len); for (int i = 0; i < len; i++) { char c = path[i]; if (c == '/') winPathBuilder.Append('\\'); else winPathBuilder.Append(c); } path = winPathBuilder.ToString(); } UNCpath = true; } path = path.Substring(trim); len -= trim; } bool localPath; // UNC if (UNCpath || ( (len > 1) && ( (path[0] == '/') || (path[0] == '\\') ) && ( (path[1] == '/') || (path[1] == '\\') ) )) localPath = false; else { int colon = path.IndexOf(':') + 1; // protocol other than file: if ((colon != 0) && (len > colon+1) && ( (path[colon] == '/') || (path[colon] == '\\') ) && ( (path[colon+1] == '/') || (path[colon+1] == '\\') )) localPath = false; else localPath = true; } if (localPath) { if (useAppBase && ( (len == 1) || (path[1] != ':') )) { String appBase = Value[(int) LoaderInformation.ApplicationBaseValue]; if ((appBase == null) || (appBase.Length == 0)) throw new MemberAccessException(Environment.GetResourceString("AppDomain_AppBaseNotSet")); StringBuilder result = StringBuilderCache.Acquire(); bool slash = false; if ((path[0] == '/') || (path[0] == '\\')) { String pathRoot = Path.GetPathRoot(appBase); if (pathRoot.Length == 0) { // URL int index = appBase.IndexOf(":/", StringComparison.Ordinal); if (index == -1) index = appBase.IndexOf(":\\", StringComparison.Ordinal); // Get past last slashes of "url:http://" int urlLen = appBase.Length; for (index += 1; (index < urlLen) && ((appBase[index] == '/') || (appBase[index] == '\\')); index++); // Now find the next slash to get domain name for(; (index < urlLen) && (appBase[index] != '/') && (appBase[index] != '\\'); index++); pathRoot = appBase.Substring(0, index); } result.Append(pathRoot); slash = true; } else result.Append(appBase); // Make sure there's a slash separator (and only one) int aLen = result.Length - 1; if ((result[aLen] != '/') && (result[aLen] != '\\')) { if (!slash) { if (appBase.IndexOf(":/", StringComparison.Ordinal) == -1) result.Append('\\'); else result.Append('/'); } } else if (slash) result.Remove(aLen, 1); result.Append(path); path = StringBuilderCache.GetStringAndRelease(result); } else path = Path.GetFullPathInternal(path); } return path; } private bool IsFilePath(String path) { return (path[1] == ':') || ( (path[0] == '\\') && (path[1] == '\\') ); } internal static String ApplicationBaseKey { get { return ACTAG_APP_BASE_URL; } } public String ConfigurationFile { [System.Security.SecuritySafeCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] 
[ResourceConsumption(ResourceScope.Machine)] get { return VerifyDir(Value[(int) LoaderInformation.ConfigurationFileValue], true); } [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] set { Value[(int) LoaderInformation.ConfigurationFileValue] = value; } } // Used by the ResourceManager internally. This must not do any // security checks to avoid infinite loops. internal String ConfigurationFileInternal { [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] get { return NormalizePath(Value[(int) LoaderInformation.ConfigurationFileValue], true); } } internal static String ConfigurationFileKey { get { return ACTAG_APP_CONFIG_FILE; } } public byte[] GetConfigurationBytes() { if (_ConfigurationBytes == null) return null; return (byte[]) _ConfigurationBytes.Clone(); } public void SetConfigurationBytes(byte[] value) { _ConfigurationBytes = value; } private static String ConfigurationBytesKey { get { return ACTAG_APP_CONFIG_BLOB; } } // only needed by AppDomain.Setup(). Not really needed by users. internal Dictionary<string, object> GetCompatibilityFlags() { return _CompatFlags; } public void SetCompatibilitySwitches(IEnumerable<String> switches) { #if !FEATURE_CORECLR if(_AppDomainSortingSetupInfo != null) { _AppDomainSortingSetupInfo._useV2LegacySorting = false; _AppDomainSortingSetupInfo._useV4LegacySorting = false; } #endif #if FEATURE_RANDOMIZED_STRING_HASHING _UseRandomizedStringHashing = false; #endif if (switches != null) { _CompatFlags = new Dictionary<string, object>(); foreach (String str in switches) { #if !FEATURE_CORECLR if(StringComparer.OrdinalIgnoreCase.Equals("NetFx40_Legacy20SortingBehavior", str)) { if(_AppDomainSortingSetupInfo == null) { _AppDomainSortingSetupInfo = new AppDomainSortingSetupInfo(); } _AppDomainSortingSetupInfo._useV2LegacySorting = true; } if(StringComparer.OrdinalIgnoreCase.Equals("NetFx45_Legacy40SortingBehavior", str)) { if(_AppDomainSortingSetupInfo == null) { _AppDomainSortingSetupInfo = new AppDomainSortingSetupInfo(); } _AppDomainSortingSetupInfo._useV4LegacySorting = true; } #endif #if FEATURE_RANDOMIZED_STRING_HASHING if(StringComparer.OrdinalIgnoreCase.Equals("UseRandomizedStringHashAlgorithm", str)) { _UseRandomizedStringHashing = true; } #endif _CompatFlags.Add(str, null); } } else { _CompatFlags = null; } } // A target Framework moniker, in a format parsible by the FrameworkName class. 
public String TargetFrameworkName { get { return _TargetFrameworkName; } set { _TargetFrameworkName = value; } } internal bool CheckedForTargetFrameworkName { get { return _CheckedForTargetFrameworkName; } set { _CheckedForTargetFrameworkName = value; } } #if !FEATURE_CORECLR [SecurityCritical] public void SetNativeFunction(string functionName, int functionVersion, IntPtr functionPointer) { if(functionName == null) { throw new ArgumentNullException("functionName"); } if(functionPointer == IntPtr.Zero) { throw new ArgumentNullException("functionPointer"); } if(String.IsNullOrWhiteSpace(functionName)) { throw new ArgumentException(Environment.GetResourceString("Argument_NPMSInvalidName"), "functionName"); } Contract.EndContractBlock(); if(functionVersion < 1) { throw new ArgumentException(Environment.GetResourceString("ArgumentException_MinSortingVersion", 1, functionName)); } if(_AppDomainSortingSetupInfo == null) { _AppDomainSortingSetupInfo = new AppDomainSortingSetupInfo(); } if(String.Equals(functionName, "IsNLSDefinedString", StringComparison.OrdinalIgnoreCase)) { _AppDomainSortingSetupInfo._pfnIsNLSDefinedString = functionPointer; } if (String.Equals(functionName, "CompareStringEx", StringComparison.OrdinalIgnoreCase)) { _AppDomainSortingSetupInfo._pfnCompareStringEx = functionPointer; } if (String.Equals(functionName, "LCMapStringEx", StringComparison.OrdinalIgnoreCase)) { _AppDomainSortingSetupInfo._pfnLCMapStringEx = functionPointer; } if (String.Equals(functionName, "FindNLSStringEx", StringComparison.OrdinalIgnoreCase)) { _AppDomainSortingSetupInfo._pfnFindNLSStringEx = functionPointer; } if (String.Equals(functionName, "CompareStringOrdinal", StringComparison.OrdinalIgnoreCase)) { _AppDomainSortingSetupInfo._pfnCompareStringOrdinal = functionPointer; } if (String.Equals(functionName, "GetNLSVersionEx", StringComparison.OrdinalIgnoreCase)) { _AppDomainSortingSetupInfo._pfnGetNLSVersionEx = functionPointer; } if (String.Equals(functionName, "FindStringOrdinal", StringComparison.OrdinalIgnoreCase)) { _AppDomainSortingSetupInfo._pfnFindStringOrdinal = functionPointer; } } #endif public String DynamicBase { [System.Security.SecuritySafeCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] get { return VerifyDir(Value[(int) LoaderInformation.DynamicBaseValue], true); } [System.Security.SecuritySafeCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] set { if (value == null) Value[(int) LoaderInformation.DynamicBaseValue] = null; else { if(ApplicationName == null) throw new MemberAccessException(Environment.GetResourceString("AppDomain_RequireApplicationName")); StringBuilder s = new StringBuilder( NormalizePath(value, false) ); s.Append('\\'); string h = ParseNumbers.IntToString(ApplicationName.GetLegacyNonRandomizedHashCode(), 16, 8, '0', ParseNumbers.PrintAsI4); s.Append(h); Value[(int) LoaderInformation.DynamicBaseValue] = s.ToString(); } } } internal static String DynamicBaseKey { get { return ACTAG_APP_DYNAMIC_BASE; } } public bool DisallowPublisherPolicy { get { return (Value[(int) LoaderInformation.DisallowPublisherPolicyValue] != null); } set { if (value) Value[(int) LoaderInformation.DisallowPublisherPolicyValue]="true"; else Value[(int) LoaderInformation.DisallowPublisherPolicyValue]=null; } } public bool DisallowBindingRedirects { get { return (Value[(int) LoaderInformation.DisallowBindingRedirectsValue] != null); } set { if (value) Value[(int) 
LoaderInformation.DisallowBindingRedirectsValue] = "true"; else Value[(int) LoaderInformation.DisallowBindingRedirectsValue] = null; } } public bool DisallowCodeDownload { get { return (Value[(int) LoaderInformation.DisallowCodeDownloadValue] != null); } set { if (value) Value[(int) LoaderInformation.DisallowCodeDownloadValue] = "true"; else Value[(int) LoaderInformation.DisallowCodeDownloadValue] = null; } } public bool DisallowApplicationBaseProbing { get { return (Value[(int) LoaderInformation.DisallowAppBaseProbingValue] != null); } set { if (value) Value[(int) LoaderInformation.DisallowAppBaseProbingValue] = "true"; else Value[(int) LoaderInformation.DisallowAppBaseProbingValue] = null; } } [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] private String VerifyDir(String dir, bool normalize) { if (dir != null) { if (dir.Length == 0) dir = null; else { if (normalize) dir = NormalizePath(dir, true); // The only way AppDomainSetup is exposed in coreclr is through the AppDomainManager // and the AppDomainManager is a SecurityCritical type. Also, all callers of callstacks // leading from VerifyDir are SecurityCritical. So we can remove the Demand because // we have validated that all callers are SecurityCritical #if !FEATURE_CORECLR if (IsFilePath(dir)) new FileIOPermission( FileIOPermissionAccess.PathDiscovery, dir ).Demand(); #endif // !FEATURE_CORECLR } } return dir; } [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] private void VerifyDirList(String dirs) { if (dirs != null) { String[] dirArray = dirs.Split(';'); int len = dirArray.Length; for (int i = 0; i < len; i++) VerifyDir(dirArray[i], true); } } internal String DeveloperPath { [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] get { String dirs = Value[(int) LoaderInformation.DevPathValue]; VerifyDirList(dirs); return dirs; } [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] set { if(value == null) Value[(int) LoaderInformation.DevPathValue] = null; else { String[] directories = value.Split(';'); int size = directories.Length; StringBuilder newPath = StringBuilderCache.Acquire(); bool fDelimiter = false; for(int i = 0; i < size; i++) { if(directories[i].Length != 0) { if(fDelimiter) newPath.Append(";"); else fDelimiter = true; newPath.Append(Path.GetFullPathInternal(directories[i])); } } String newString = StringBuilderCache.GetStringAndRelease(newPath); if (newString.Length == 0) Value[(int) LoaderInformation.DevPathValue] = null; else Value[(int) LoaderInformation.DevPathValue] = newString; } } } internal static String DisallowPublisherPolicyKey { get { return ACTAG_DISALLOW_APPLYPUBLISHERPOLICY; } } internal static String DisallowCodeDownloadKey { get { return ACTAG_CODE_DOWNLOAD_DISABLED; } } internal static String DisallowBindingRedirectsKey { get { return ACTAG_DISALLOW_APP_BINDING_REDIRECTS; } } internal static String DeveloperPathKey { get { return ACTAG_DEV_PATH; } } internal static String DisallowAppBaseProbingKey { get { return ACTAG_DISALLOW_APP_BASE_PROBING; } } public String ApplicationName { get { return Value[(int) LoaderInformation.ApplicationNameValue]; } set { Value[(int) LoaderInformation.ApplicationNameValue] = value; } } internal static String ApplicationNameKey { get { return ACTAG_APP_NAME; } } 
[XmlIgnoreMember] public AppDomainInitializer AppDomainInitializer { get { return _AppDomainInitializer; } set { _AppDomainInitializer = value; } } public string[] AppDomainInitializerArguments { get { return _AppDomainInitializerArguments; } set { _AppDomainInitializerArguments = value; } } #if FEATURE_CLICKONCE [XmlIgnoreMember] public ActivationArguments ActivationArguments { [Pure] get { return _ActivationArguments; } set { _ActivationArguments = value; } } #endif // !FEATURE_CLICKONCE internal ApplicationTrust InternalGetApplicationTrust() { if (_ApplicationTrust == null) return null; #if FEATURE_CORECLR ApplicationTrust grantSet = new ApplicationTrust(NamedPermissionSet.GetBuiltInSet(_ApplicationTrust)); #else SecurityElement securityElement = SecurityElement.FromString(_ApplicationTrust); ApplicationTrust grantSet = new ApplicationTrust(); grantSet.FromXml(securityElement); #endif return grantSet; } #if FEATURE_CORECLR internal void InternalSetApplicationTrust(String permissionSetName) { _ApplicationTrust = permissionSetName; } #else internal void InternalSetApplicationTrust(ApplicationTrust value) { if (value != null) { _ApplicationTrust = value.ToXml().ToString(); } else { _ApplicationTrust = null; } } #endif #if FEATURE_CLICKONCE [XmlIgnoreMember] public ApplicationTrust ApplicationTrust { get { return InternalGetApplicationTrust(); } set { InternalSetApplicationTrust(value); } } #else // FEATURE_CLICKONCE [XmlIgnoreMember] internal ApplicationTrust ApplicationTrust { get { return InternalGetApplicationTrust(); } #if !FEATURE_CORECLR set { InternalSetApplicationTrust(value); } #endif } #endif // FEATURE_CLICKONCE public String PrivateBinPath { [System.Security.SecuritySafeCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] get { String dirs = Value[(int) LoaderInformation.PrivateBinPathValue]; VerifyDirList(dirs); return dirs; } [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] set { Value[(int) LoaderInformation.PrivateBinPathValue] = value; } } internal static String PrivateBinPathKey { get { return ACTAG_APP_PRIVATE_BINPATH; } } public String PrivateBinPathProbe { get { return Value[(int) LoaderInformation.PrivateBinPathProbeValue]; } set { Value[(int) LoaderInformation.PrivateBinPathProbeValue] = value; } } internal static String PrivateBinPathProbeKey { get { return ACTAG_BINPATH_PROBE_ONLY; } } public String ShadowCopyDirectories { [System.Security.SecuritySafeCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] get { String dirs = Value[(int) LoaderInformation.ShadowCopyDirectoriesValue]; VerifyDirList(dirs); return dirs; } [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] set { Value[(int) LoaderInformation.ShadowCopyDirectoriesValue] = value; } } internal static String ShadowCopyDirectoriesKey { get { return ACTAG_APP_SHADOW_COPY_DIRS; } } public String ShadowCopyFiles { get { return Value[(int) LoaderInformation.ShadowCopyFilesValue]; } set { if((value != null) && (String.Compare(value, "true", StringComparison.OrdinalIgnoreCase) == 0)) Value[(int) LoaderInformation.ShadowCopyFilesValue] = value; else Value[(int) LoaderInformation.ShadowCopyFilesValue] = null; } } internal static String ShadowCopyFilesKey { get { return ACTAG_FORCE_CACHE_INSTALL; } } public String CachePath { [System.Security.SecuritySafeCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] 
[ResourceConsumption(ResourceScope.Machine)] get { return VerifyDir(Value[(int) LoaderInformation.CachePathValue], false); } [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] set { Value[(int) LoaderInformation.CachePathValue] = NormalizePath(value, false); } } internal static String CachePathKey { get { return ACTAG_APP_CACHE_BASE; } } public String LicenseFile { [System.Security.SecuritySafeCritical] // auto-generated [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] get { return VerifyDir(Value[(int) LoaderInformation.LicenseFileValue], true); } [ResourceExposure(ResourceScope.Machine)] [ResourceConsumption(ResourceScope.Machine)] set { Value[(int) LoaderInformation.LicenseFileValue] = value; } } public LoaderOptimization LoaderOptimization { get { return _LoaderOptimization; } set { _LoaderOptimization = value; } } internal static string LoaderOptimizationKey { get { return LOADER_OPTIMIZATION; } } internal static string ConfigurationExtension { get { return CONFIGURATION_EXTENSION; } } internal static String PrivateBinPathEnvironmentVariable { get { return APPENV_RELATIVEPATH; } } internal static string RuntimeConfigurationFile { get { return MACHINE_CONFIGURATION_FILE; } } internal static string MachineConfigKey { get { return ACTAG_MACHINE_CONFIG; } } internal static string HostBindingKey { get { return ACTAG_HOST_CONFIG_FILE; } } #if FEATURE_FUSION [SecurityCritical] [ResourceExposure(ResourceScope.None)] [ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] internal bool UpdateContextPropertyIfNeeded(LoaderInformation FieldValue, String FieldKey, String UpdatedField, IntPtr fusionContext, AppDomainSetup oldADS) { String FieldString = Value[(int) FieldValue], OldFieldString = (oldADS == null ? null : oldADS.Value[(int) FieldValue]); if (FieldString != OldFieldString) { // Compare references since strings are immutable UpdateContextProperty(fusionContext, FieldKey, UpdatedField == null ? 
FieldString : UpdatedField); return true; } return false; } [SecurityCritical] [ResourceExposure(ResourceScope.None)] [ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] internal void UpdateBooleanContextPropertyIfNeeded(LoaderInformation FieldValue, String FieldKey, IntPtr fusionContext, AppDomainSetup oldADS) { if (Value[(int) FieldValue] != null) UpdateContextProperty(fusionContext, FieldKey, "true"); else if (oldADS != null && oldADS.Value[(int) FieldValue] != null) UpdateContextProperty(fusionContext, FieldKey, "false"); } [SecurityCritical] [ResourceExposure(ResourceScope.None)] [ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] internal static bool ByteArraysAreDifferent(Byte[] A, Byte[] B) { int length = A.Length; if (length != B.Length) return true; for(int i = 0; i < length; i++) { if (A[i] != B[i]) return true; } return false; } [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.None)] [ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] internal static void UpdateByteArrayContextPropertyIfNeeded(Byte[] NewArray, Byte[] OldArray, String FieldKey, IntPtr fusionContext) { if ((NewArray != null && OldArray == null) || (NewArray == null && OldArray != null) || (NewArray != null && OldArray != null && ByteArraysAreDifferent(NewArray, OldArray))) UpdateContextProperty(fusionContext, FieldKey, NewArray); } [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.None)] [ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] internal void SetupFusionContext(IntPtr fusionContext, AppDomainSetup oldADS) { UpdateContextPropertyIfNeeded(LoaderInformation.ApplicationBaseValue, ApplicationBaseKey, null, fusionContext, oldADS); UpdateContextPropertyIfNeeded(LoaderInformation.PrivateBinPathValue, PrivateBinPathKey, null, fusionContext, oldADS); UpdateContextPropertyIfNeeded(LoaderInformation.DevPathValue, DeveloperPathKey, null, fusionContext, oldADS); UpdateBooleanContextPropertyIfNeeded(LoaderInformation.DisallowPublisherPolicyValue, DisallowPublisherPolicyKey, fusionContext, oldADS); UpdateBooleanContextPropertyIfNeeded(LoaderInformation.DisallowCodeDownloadValue, DisallowCodeDownloadKey, fusionContext, oldADS); UpdateBooleanContextPropertyIfNeeded(LoaderInformation.DisallowBindingRedirectsValue, DisallowBindingRedirectsKey, fusionContext, oldADS); UpdateBooleanContextPropertyIfNeeded(LoaderInformation.DisallowAppBaseProbingValue, DisallowAppBaseProbingKey, fusionContext, oldADS); if(UpdateContextPropertyIfNeeded(LoaderInformation.ShadowCopyFilesValue, ShadowCopyFilesKey, ShadowCopyFiles, fusionContext, oldADS)) { // If we are asking for shadow copy directories then default to // only to the ones that are in the private bin path. if(Value[(int) LoaderInformation.ShadowCopyDirectoriesValue] == null) ShadowCopyDirectories = BuildShadowCopyDirectories(); UpdateContextPropertyIfNeeded(LoaderInformation.ShadowCopyDirectoriesValue, ShadowCopyDirectoriesKey, null, fusionContext, oldADS); } UpdateContextPropertyIfNeeded(LoaderInformation.CachePathValue, CachePathKey, null, fusionContext, oldADS); UpdateContextPropertyIfNeeded(LoaderInformation.PrivateBinPathProbeValue, PrivateBinPathProbeKey, PrivateBinPathProbe, fusionContext, oldADS); UpdateContextPropertyIfNeeded(LoaderInformation.ConfigurationFileValue, ConfigurationFileKey, null, fusionContext, oldADS); UpdateByteArrayContextPropertyIfNeeded(_ConfigurationBytes, oldADS == null ? 
null : oldADS.GetConfigurationBytes(), ConfigurationBytesKey, fusionContext); UpdateContextPropertyIfNeeded(LoaderInformation.ApplicationNameValue, ApplicationNameKey, ApplicationName, fusionContext, oldADS); UpdateContextPropertyIfNeeded(LoaderInformation.DynamicBaseValue, DynamicBaseKey, null, fusionContext, oldADS); // Always add the runtime configuration file to the appdomain UpdateContextProperty(fusionContext, MachineConfigKey, RuntimeEnvironment.GetRuntimeDirectoryImpl() + RuntimeConfigurationFile); String hostBindingFile = RuntimeEnvironment.GetHostBindingFile(); if(hostBindingFile != null || oldADS != null) // If oldADS != null, we don't know the old value of the hostBindingFile, so we force an update even when hostBindingFile == null. UpdateContextProperty(fusionContext, HostBindingKey, hostBindingFile); } [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.None)] [MethodImplAttribute(MethodImplOptions.InternalCall)] internal static extern void UpdateContextProperty(IntPtr fusionContext, string key, Object value); #endif // FEATURE_FUSION static internal int Locate(String s) { if(String.IsNullOrEmpty(s)) return -1; #if FEATURE_FUSION // verify assumptions hardcoded into the switch below Contract.Assert('A' == ACTAG_APP_CONFIG_FILE[0] , "Assumption violated"); Contract.Assert('A' == ACTAG_APP_NAME[0] , "Assumption violated"); Contract.Assert('A' == ACTAG_APP_BASE_URL[0] , "Assumption violated"); Contract.Assert('B' == ACTAG_BINPATH_PROBE_ONLY[0] , "Assumption violated"); Contract.Assert('C' == ACTAG_APP_CACHE_BASE[0] , "Assumption violated"); Contract.Assert('D' == ACTAG_DEV_PATH[0] , "Assumption violated"); Contract.Assert('D' == ACTAG_APP_DYNAMIC_BASE[0] , "Assumption violated"); Contract.Assert('F' == ACTAG_FORCE_CACHE_INSTALL[0] , "Assumption violated"); Contract.Assert('L' == LICENSE_FILE[0] , "Assumption violated"); Contract.Assert('P' == ACTAG_APP_PRIVATE_BINPATH[0] , "Assumption violated"); Contract.Assert('S' == ACTAG_APP_SHADOW_COPY_DIRS[0], "Assumption violated"); Contract.Assert('D' == ACTAG_DISALLOW_APPLYPUBLISHERPOLICY[0], "Assumption violated"); Contract.Assert('C' == ACTAG_CODE_DOWNLOAD_DISABLED[0], "Assumption violated"); Contract.Assert('D' == ACTAG_DISALLOW_APP_BINDING_REDIRECTS[0], "Assumption violated"); Contract.Assert('D' == ACTAG_DISALLOW_APP_BASE_PROBING[0], "Assumption violated"); Contract.Assert('A' == ACTAG_APP_CONFIG_BLOB[0], "Assumption violated"); switch (s[0]) { case 'A': if (s == ACTAG_APP_CONFIG_FILE) return (int)LoaderInformation.ConfigurationFileValue; if (s == ACTAG_APP_NAME) return (int)LoaderInformation.ApplicationNameValue; if (s == ACTAG_APP_BASE_URL) return (int)LoaderInformation.ApplicationBaseValue; if (s == ACTAG_APP_CONFIG_BLOB) return (int)LoaderInformation.ConfigurationBytesValue; break; case 'B': if (s == ACTAG_BINPATH_PROBE_ONLY) return (int)LoaderInformation.PrivateBinPathProbeValue; break; case 'C': if (s == ACTAG_APP_CACHE_BASE) return (int)LoaderInformation.CachePathValue; if (s == ACTAG_CODE_DOWNLOAD_DISABLED) return (int)LoaderInformation.DisallowCodeDownloadValue; break; case 'D': if (s == ACTAG_DEV_PATH) return (int)LoaderInformation.DevPathValue; if (s == ACTAG_APP_DYNAMIC_BASE) return (int)LoaderInformation.DynamicBaseValue; if (s == ACTAG_DISALLOW_APPLYPUBLISHERPOLICY) return (int)LoaderInformation.DisallowPublisherPolicyValue; if (s == ACTAG_DISALLOW_APP_BINDING_REDIRECTS) return (int)LoaderInformation.DisallowBindingRedirectsValue; if (s == ACTAG_DISALLOW_APP_BASE_PROBING) return 
(int)LoaderInformation.DisallowAppBaseProbingValue; break; case 'F': if (s == ACTAG_FORCE_CACHE_INSTALL) return (int)LoaderInformation.ShadowCopyFilesValue; break; case 'L': if (s == LICENSE_FILE) return (int)LoaderInformation.LicenseFileValue; break; case 'P': if (s == ACTAG_APP_PRIVATE_BINPATH) return (int)LoaderInformation.PrivateBinPathValue; break; case 'S': if (s == ACTAG_APP_SHADOW_COPY_DIRS) return (int)LoaderInformation.ShadowCopyDirectoriesValue; break; } #else Contract.Assert('A' == ACTAG_APP_BASE_URL[0] , "Assumption violated"); if (s[0]=='A' && s == ACTAG_APP_BASE_URL) return (int)LoaderInformation.ApplicationBaseValue; #endif //FEATURE_FUSION return -1; } #if FEATURE_FUSION private string BuildShadowCopyDirectories() { // Default to only to the ones that are in the private bin path. String binPath = Value[(int) LoaderInformation.PrivateBinPathValue]; if(binPath == null) return null; StringBuilder result = StringBuilderCache.Acquire(); String appBase = Value[(int) LoaderInformation.ApplicationBaseValue]; if(appBase != null) { char[] sep = {';'}; string[] directories = binPath.Split(sep); int size = directories.Length; bool appendSlash = !( (appBase[appBase.Length-1] == '/') || (appBase[appBase.Length-1] == '\\') ); if (size == 0) { result.Append(appBase); if (appendSlash) result.Append('\\'); result.Append(binPath); } else { for(int i = 0; i < size; i++) { result.Append(appBase); if (appendSlash) result.Append('\\'); result.Append(directories[i]); if (i < size-1) result.Append(';'); } } } return StringBuilderCache.GetStringAndRelease(result); } #endif // FEATURE_FUSION #if FEATURE_COMINTEROP public bool SandboxInterop { get { return _DisableInterfaceCache; } set { _DisableInterfaceCache = value; } } #endif // FEATURE_COMINTEROP } }
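// ---------------------------------------------------------------------------------
// Illustrative usage sketch for the AppDomainSetup class above. This is a minimal,
// hedged example of how the public members shown earlier (ApplicationBase,
// PrivateBinPath, ConfigurationFile, ShadowCopyFiles, SetCompatibilitySwitches) are
// typically combined when creating a secondary AppDomain on the full .NET Framework.
// The paths, domain name, and the AppDomainSetupExamples namespace are hypothetical
// placeholders; only the API calls are taken from the class above or from the public
// AppDomain surface.
// ---------------------------------------------------------------------------------
using System;

namespace AppDomainSetupExamples
{
    internal static class AppDomainSetupUsage
    {
        internal static void CreateConfiguredDomain()
        {
            // Describe how assemblies should be located and loaded in the new domain.
            AppDomainSetup setup = new AppDomainSetup();
            setup.ApplicationBase = @"C:\MyApp";                    // probing root (hypothetical path)
            setup.PrivateBinPath = "plugins;extensions";            // extra probing subdirectories under the app base
            setup.ConfigurationFile = @"C:\MyApp\MyApp.exe.config"; // hypothetical config file path
            setup.ShadowCopyFiles = "true";                         // only "true" (case-insensitive) is kept by the setter

            // Compatibility switches are opaque strings; only the keys are inspected.
            // "NetFx40_Legacy20SortingBehavior" is one of the switches recognized above.
            setup.SetCompatibilitySwitches(new[] { "NetFx40_Legacy20SortingBehavior" });

            // Create the domain with this setup and unload it when done.
            AppDomain domain = AppDomain.CreateDomain("PluginDomain", null, setup);
            try
            {
                // Work against the new domain here, e.g. domain.CreateInstanceAndUnwrap(...).
            }
            finally
            {
                AppDomain.Unload(domain);
            }
        }
    }
}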
using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Reflection; namespace Office_Add_in_ASPNET_SSO_WebAPI.Areas.HelpPage { /// <summary> /// This class will create an object of a given type and populate it with sample data. /// </summary> public class ObjectGenerator { internal const int DefaultCollectionSize = 2; private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator(); /// <summary> /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types: /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc. /// Complex types: POCO types. /// Nullables: <see cref="Nullable{T}"/>. /// Arrays: arrays of simple types or complex types. /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/> /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>. /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>. /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>An object of the given type.</returns> public object GenerateObject(Type type) { return GenerateObject(type, new Dictionary<Type, object>()); } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")] private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences) { try { if (SimpleTypeObjectGenerator.CanGenerateObject(type)) { return SimpleObjectGenerator.GenerateObject(type); } if (type.IsArray) { return GenerateArray(type, DefaultCollectionSize, createdObjectReferences); } if (type.IsGenericType) { return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IDictionary)) { return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences); } if (typeof(IDictionary).IsAssignableFrom(type)) { return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection)) { return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences); } if (typeof(IList).IsAssignableFrom(type)) { return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IQueryable)) { return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences); } if (type.IsEnum) { return GenerateEnum(type); } if (type.IsPublic || type.IsNestedPublic) { return GenerateComplexObject(type, createdObjectReferences); } } catch { // Returns null if anything fails return null; } return null; } private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences) { Type genericTypeDefinition = type.GetGenericTypeDefinition(); if (genericTypeDefinition == typeof(Nullable<>)) { return GenerateNullable(type, 
createdObjectReferences); } if (genericTypeDefinition == typeof(KeyValuePair<,>)) { return GenerateKeyValuePair(type, createdObjectReferences); } if (IsTuple(genericTypeDefinition)) { return GenerateTuple(type, createdObjectReferences); } Type[] genericArguments = type.GetGenericArguments(); if (genericArguments.Length == 1) { if (genericTypeDefinition == typeof(IList<>) || genericTypeDefinition == typeof(IEnumerable<>) || genericTypeDefinition == typeof(ICollection<>)) { Type collectionType = typeof(List<>).MakeGenericType(genericArguments); return GenerateCollection(collectionType, collectionSize, createdObjectReferences); } if (genericTypeDefinition == typeof(IQueryable<>)) { return GenerateQueryable(type, collectionSize, createdObjectReferences); } Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]); if (closedCollectionType.IsAssignableFrom(type)) { return GenerateCollection(type, collectionSize, createdObjectReferences); } } if (genericArguments.Length == 2) { if (genericTypeDefinition == typeof(IDictionary<,>)) { Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments); return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences); } Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]); if (closedDictionaryType.IsAssignableFrom(type)) { return GenerateDictionary(type, collectionSize, createdObjectReferences); } } if (type.IsPublic || type.IsNestedPublic) { return GenerateComplexObject(type, createdObjectReferences); } return null; } private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences) { Type[] genericArgs = type.GetGenericArguments(); object[] parameterValues = new object[genericArgs.Length]; bool failedToCreateTuple = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < genericArgs.Length; i++) { parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences); failedToCreateTuple &= parameterValues[i] == null; } if (failedToCreateTuple) { return null; } object result = Activator.CreateInstance(type, parameterValues); return result; } private static bool IsTuple(Type genericTypeDefinition) { return genericTypeDefinition == typeof(Tuple<>) || genericTypeDefinition == typeof(Tuple<,>) || genericTypeDefinition == typeof(Tuple<,,>) || genericTypeDefinition == typeof(Tuple<,,,>) || genericTypeDefinition == typeof(Tuple<,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,,,>); } private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences) { Type[] genericArgs = keyValuePairType.GetGenericArguments(); Type typeK = genericArgs[0]; Type typeV = genericArgs[1]; ObjectGenerator objectGenerator = new ObjectGenerator(); object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences); object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences); if (keyObject == null && valueObject == null) { // Failed to create key and values return null; } object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject); return result; } private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences) { Type type = arrayType.GetElementType(); Array result = Array.CreateInstance(type, size); bool areAllElementsNull = true; 
ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object element = objectGenerator.GenerateObject(type, createdObjectReferences); result.SetValue(element, i); areAllElementsNull &= element == null; } if (areAllElementsNull) { return null; } return result; } private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences) { Type typeK = typeof(object); Type typeV = typeof(object); if (dictionaryType.IsGenericType) { Type[] genericArgs = dictionaryType.GetGenericArguments(); typeK = genericArgs[0]; typeV = genericArgs[1]; } object result = Activator.CreateInstance(dictionaryType); MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd"); MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey"); ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences); if (newKey == null) { // Cannot generate a valid key return null; } bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey }); if (!containsKey) { object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences); addMethod.Invoke(result, new object[] { newKey, newValue }); } } return result; } private static object GenerateEnum(Type enumType) { Array possibleValues = Enum.GetValues(enumType); if (possibleValues.Length > 0) { return possibleValues.GetValue(0); } return null; } private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences) { bool isGeneric = queryableType.IsGenericType; object list; if (isGeneric) { Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments()); list = GenerateCollection(listType, size, createdObjectReferences); } else { list = GenerateArray(typeof(object[]), size, createdObjectReferences); } if (list == null) { return null; } if (isGeneric) { Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments()); MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType }); return asQueryableMethod.Invoke(null, new[] { list }); } return Queryable.AsQueryable((IEnumerable)list); } private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences) { Type type = collectionType.IsGenericType ? 
collectionType.GetGenericArguments()[0] : typeof(object); object result = Activator.CreateInstance(collectionType); MethodInfo addMethod = collectionType.GetMethod("Add"); bool areAllElementsNull = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object element = objectGenerator.GenerateObject(type, createdObjectReferences); addMethod.Invoke(result, new object[] { element }); areAllElementsNull &= element == null; } if (areAllElementsNull) { return null; } return result; } private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences) { Type type = nullableType.GetGenericArguments()[0]; ObjectGenerator objectGenerator = new ObjectGenerator(); return objectGenerator.GenerateObject(type, createdObjectReferences); } private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences) { object result = null; if (createdObjectReferences.TryGetValue(type, out result)) { // The object has been created already, just return it. This will handle the circular reference case. return result; } if (type.IsValueType) { result = Activator.CreateInstance(type); } else { ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes); if (defaultCtor == null) { // Cannot instantiate the type because it doesn't have a default constructor return null; } result = defaultCtor.Invoke(new object[0]); } createdObjectReferences.Add(type, result); SetPublicProperties(type, result, createdObjectReferences); SetPublicFields(type, result, createdObjectReferences); return result; } private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences) { PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance); ObjectGenerator objectGenerator = new ObjectGenerator(); foreach (PropertyInfo property in properties) { if (property.CanWrite) { object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences); property.SetValue(obj, propertyValue, null); } } } private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences) { FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance); ObjectGenerator objectGenerator = new ObjectGenerator(); foreach (FieldInfo field in fields) { object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences); field.SetValue(obj, fieldValue); } } private class SimpleTypeObjectGenerator { private long _index = 0; private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators(); [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")] private static Dictionary<Type, Func<long, object>> InitializeGenerators() { return new Dictionary<Type, Func<long, object>> { { typeof(Boolean), index => true }, { typeof(Byte), index => (Byte)64 }, { typeof(Char), index => (Char)65 }, { typeof(DateTime), index => DateTime.Now }, { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) }, { typeof(DBNull), index => DBNull.Value }, { typeof(Decimal), index => (Decimal)index }, { typeof(Double), index => (Double)(index + 0.1) }, { typeof(Guid), index => Guid.NewGuid() }, { typeof(Int16), index => (Int16)(index % Int16.MaxValue) }, { typeof(Int32), index => (Int32)(index % Int32.MaxValue) }, { typeof(Int64), index => (Int64)index }, { 
typeof(Object), index => new object() }, { typeof(SByte), index => (SByte)64 }, { typeof(Single), index => (Single)(index + 0.1) }, { typeof(String), index => { return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index); } }, { typeof(TimeSpan), index => { return TimeSpan.FromTicks(1234567); } }, { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) }, { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) }, { typeof(UInt64), index => (UInt64)index }, { typeof(Uri), index => { return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index)); } }, }; } public static bool CanGenerateObject(Type type) { return DefaultGenerators.ContainsKey(type); } public object GenerateObject(Type type) { return DefaultGenerators[type](++_index); } } } }
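// ---------------------------------------------------------------------------------
// Illustrative usage sketch for the ObjectGenerator above: a hedged example of asking
// it for sample instances of a POCO, a nullable, and a generic dictionary, the way the
// help-page sample generation does. The Customer/Order types and the
// ObjectGeneratorExamples namespace are hypothetical; the behavior noted in the comments
// follows the code above (simple types come from SimpleTypeObjectGenerator, complex
// types need a public default constructor, collections get DefaultCollectionSize
// elements, and failures return null rather than throwing).
// ---------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using Office_Add_in_ASPNET_SSO_WebAPI.Areas.HelpPage;

namespace ObjectGeneratorExamples
{
    public class Customer
    {
        public int Id { get; set; }
        public string Name { get; set; }
    }

    public class Order
    {
        public Guid OrderId { get; set; }
        public DateTime Placed { get; set; }
        public Customer Customer { get; set; }   // complex property, populated recursively
        public IList<string> Tags { get; set; }  // collection property, gets DefaultCollectionSize entries
    }

    internal static class ObjectGeneratorUsage
    {
        internal static void Demo()
        {
            ObjectGenerator generator = new ObjectGenerator();

            // Simple, complex, nullable and generic collection types all go through the
            // same entry point.
            var order = (Order)generator.GenerateObject(typeof(Order));
            var maybeInt = (int?)generator.GenerateObject(typeof(int?));
            var lookup = (IDictionary<string, Customer>)generator.GenerateObject(typeof(IDictionary<string, Customer>));

            Console.WriteLine(order.Customer.Name); // e.g. "sample string 1"
            Console.WriteLine(maybeInt);            // e.g. 1
            Console.WriteLine(lookup.Count);        // DefaultCollectionSize (2) unless duplicate keys were generated
        }
    }
}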
/* * Velcro Physics: * Copyright (c) 2017 Ian Qvist * * Original source Box2D: * Copyright (c) 2006-2011 Erin Catto http://www.box2d.org * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. */ using Microsoft.Xna.Framework; using QEngine.Physics.Collision.RayCast; using QEngine.Physics.Shared; using QEngine.Physics.Tools.ConvexHull.GiftWrap; using QEngine.Physics.Utilities; namespace QEngine.Physics.Collision.Shapes { /// <summary> /// Represents a simple non-self-intersecting convex polygon. /// Create a convex hull from the given array of points. /// </summary> public class PolygonShape : Shape { private Vertices _normals; private Vertices _vertices; /// <summary> /// Initializes a new instance of the <see cref="PolygonShape" /> class. /// </summary> /// <param name="vertices">The vertices.</param> /// <param name="density">The density.</param> public PolygonShape(Vertices vertices, float density) : base(density) { ShapeType = ShapeType.Polygon; _radius = Settings.PolygonRadius; Vertices = vertices; } /// <summary> /// Create a new PolygonShape with the specified density. /// </summary> /// <param name="density">The density.</param> public PolygonShape(float density) : base(density) { System.Diagnostics.Debug.Assert(density >= 0f); ShapeType = ShapeType.Polygon; _radius = Settings.PolygonRadius; _vertices = new Vertices(Settings.MaxPolygonVertices); _normals = new Vertices(Settings.MaxPolygonVertices); } internal PolygonShape() : base(0) { ShapeType = ShapeType.Polygon; _radius = Settings.PolygonRadius; _vertices = new Vertices(Settings.MaxPolygonVertices); _normals = new Vertices(Settings.MaxPolygonVertices); } /// <summary> /// Create a convex hull from the given array of local points. /// The number of vertices must be in the range [3, Settings.MaxPolygonVertices]. /// Warning: the points may be re-ordered, even if they form a convex polygon /// Warning: collinear points are handled but not removed. Collinear points may lead to poor stacking behavior. /// </summary> public Vertices Vertices { get { return _vertices; } set { System.Diagnostics.Debug.Assert(value.Count >= 3 && value.Count <= Settings.MaxPolygonVertices); if (Settings.UseConvexHullPolygons) { //Velcro: This check is required as the GiftWrap algorithm early exits on triangles //So instead of giftwrapping a triangle, we just force it to be counter-clockwise. if (value.Count <= 3) { _vertices = new Vertices(value); _vertices.ForceCounterClockWise(); } else _vertices = GiftWrap.GetConvexHull(value); } else { _vertices = new Vertices(value); } _normals = new Vertices(_vertices.Count); // Compute normals. Ensure the edges have non-zero length. for (int i = 0; i < _vertices.Count; ++i) { int i1 = i; int i2 = i + 1 < _vertices.Count ?
i + 1 : 0; Vector2 edge = _vertices[i2] - _vertices[i1]; System.Diagnostics.Debug.Assert(edge.LengthSquared() > Settings.Epsilon * Settings.Epsilon); Vector2 temp = MathUtils.Cross(edge, 1.0f); temp.Normalize(); _normals.Add(temp); } // Compute the polygon mass data ComputeProperties(); } } public Vertices Normals => _normals; public override int ChildCount => 1; protected override void ComputeProperties() { // Polygon mass, centroid, and inertia. // Let rho be the polygon density in mass per unit area. // Then: // mass = rho * int(dA) // centroid.x = (1/mass) * rho * int(x * dA) // centroid.y = (1/mass) * rho * int(y * dA) // I = rho * int((x*x + y*y) * dA) // // We can compute these integrals by summing all the integrals // for each triangle of the polygon. To evaluate the integral // for a single triangle, we make a change of variables to // the (u,v) coordinates of the triangle: // x = x0 + e1x * u + e2x * v // y = y0 + e1y * u + e2y * v // where 0 <= u && 0 <= v && u + v <= 1. // // We integrate u from [0,1-v] and then v from [0,1]. // We also need to use the Jacobian of the transformation: // D = cross(e1, e2) // // Simplification: triangle centroid = (1/3) * (p1 + p2 + p3) // // The rest of the derivation is handled by computer algebra. System.Diagnostics.Debug.Assert(Vertices.Count >= 3); //Velcro optimization: Early exit as polygons with 0 density does not have any properties. if (_density <= 0) return; //Velcro optimization: Consolidated the calculate centroid and mass code to a single method. Vector2 center = Vector2.Zero; float area = 0.0f; float I = 0.0f; //Velcro: We change the reference point to be inside the polygon // pRef is the reference point for forming triangles. // It's location doesn't change the result (except for rounding error). Vector2 s = Vector2.Zero; // This code would put the reference point inside the polygon. for (int i = 0; i < Vertices.Count; ++i) { s += Vertices[i]; } s *= 1.0f / Vertices.Count; const float k_inv3 = 1.0f / 3.0f; for (int i = 0; i < Vertices.Count; ++i) { // Triangle vertices. Vector2 e1 = Vertices[i] - s; Vector2 e2 = i + 1 < Vertices.Count ? Vertices[i + 1] - s : Vertices[0] - s; float D = MathUtils.Cross(e1, e2); float triangleArea = 0.5f * D; area += triangleArea; // Area weighted centroid center += triangleArea * k_inv3 * (e1 + e2); float ex1 = e1.X, ey1 = e1.Y; float ex2 = e2.X, ey2 = e2.Y; float intx2 = ex1 * ex1 + ex2 * ex1 + ex2 * ex2; float inty2 = ey1 * ey1 + ey2 * ey1 + ey2 * ey2; I += (0.25f * k_inv3 * D) * (intx2 + inty2); } //The area is too small for the engine to handle. System.Diagnostics.Debug.Assert(area > Settings.Epsilon); // We save the area MassData.Area = area; // Total mass MassData.Mass = _density * area; // Center of mass center *= 1.0f / area; MassData.Centroid = center + s; // Inertia tensor relative to the local origin (point s). MassData.Inertia = _density * I; // Shift to center of mass then to original body origin. 
MassData.Inertia += MassData.Mass * (Vector2.Dot(MassData.Centroid, MassData.Centroid) - Vector2.Dot(center, center)); } public override bool TestPoint(ref Transform transform, ref Vector2 point) { Vector2 pLocal = MathUtils.MulT(transform.q, point - transform.p); for (int i = 0; i < Vertices.Count; ++i) { float dot = Vector2.Dot(Normals[i], pLocal - Vertices[i]); if (dot > 0.0f) { return false; } } return true; } public override bool RayCast(out RayCastOutput output, ref RayCastInput input, ref Transform transform, int childIndex) { output = new RayCastOutput(); // Put the ray into the polygon's frame of reference. Vector2 p1 = MathUtils.MulT(transform.q, input.Point1 - transform.p); Vector2 p2 = MathUtils.MulT(transform.q, input.Point2 - transform.p); Vector2 d = p2 - p1; float lower = 0.0f, upper = input.MaxFraction; int index = -1; for (int i = 0; i < Vertices.Count; ++i) { // p = p1 + a * d // dot(normal, p - v) = 0 // dot(normal, p1 - v) + a * dot(normal, d) = 0 float numerator = Vector2.Dot(Normals[i], Vertices[i] - p1); float denominator = Vector2.Dot(Normals[i], d); if (denominator == 0.0f) { if (numerator < 0.0f) { return false; } } else { // Note: we want this predicate without division: // lower < numerator / denominator, where denominator < 0 // Since denominator < 0, we have to flip the inequality: // lower < numerator / denominator <==> denominator * lower > numerator. if (denominator < 0.0f && numerator < lower * denominator) { // Increase lower. // The segment enters this half-space. lower = numerator / denominator; index = i; } else if (denominator > 0.0f && numerator < upper * denominator) { // Decrease upper. // The segment exits this half-space. upper = numerator / denominator; } } // The use of epsilon here causes the assert on lower to trip // in some cases. Apparently the use of epsilon was to make edge // shapes work, but now those are handled separately. //if (upper < lower - b2_epsilon) if (upper < lower) { return false; } } System.Diagnostics.Debug.Assert(0.0f <= lower && lower <= input.MaxFraction); if (index >= 0) { output.Fraction = lower; output.Normal = MathUtils.Mul(transform.q, Normals[index]); return true; } return false; } /// <summary> /// Given a transform, compute the associated axis aligned bounding box for a child shape. 
/// </summary> /// <param name="aabb">The aabb results.</param> /// <param name="transform">The world transform of the shape.</param> /// <param name="childIndex">The child shape index.</param> public override void ComputeAABB(out AABB aabb, ref Transform transform, int childIndex) { Vector2 lower = MathUtils.Mul(ref transform, Vertices[0]); Vector2 upper = lower; for (int i = 1; i < Vertices.Count; ++i) { Vector2 v = MathUtils.Mul(ref transform, Vertices[i]); lower = Vector2.Min(lower, v); upper = Vector2.Max(upper, v); } Vector2 r = new Vector2(Radius, Radius); aabb.LowerBound = lower - r; aabb.UpperBound = upper + r; } public override float ComputeSubmergedArea(ref Vector2 normal, float offset, ref Transform xf, out Vector2 sc) { sc = Vector2.Zero; //Transform plane into shape co-ordinates Vector2 normalL = MathUtils.MulT(xf.q, normal); float offsetL = offset - Vector2.Dot(normal, xf.p); float[] depths = new float[Settings.MaxPolygonVertices]; int diveCount = 0; int intoIndex = -1; int outoIndex = -1; bool lastSubmerged = false; int i; for (i = 0; i < Vertices.Count; i++) { depths[i] = Vector2.Dot(normalL, Vertices[i]) - offsetL; bool isSubmerged = depths[i] < -Settings.Epsilon; if (i > 0) { if (isSubmerged) { if (!lastSubmerged) { intoIndex = i - 1; diveCount++; } } else { if (lastSubmerged) { outoIndex = i - 1; diveCount++; } } } lastSubmerged = isSubmerged; } switch (diveCount) { case 0: if (lastSubmerged) { //Completely submerged sc = MathUtils.Mul(ref xf, MassData.Centroid); return MassData.Mass / Density; } //Completely dry return 0; case 1: if (intoIndex == -1) { intoIndex = Vertices.Count - 1; } else { outoIndex = Vertices.Count - 1; } break; } int intoIndex2 = (intoIndex + 1) % Vertices.Count; int outoIndex2 = (outoIndex + 1) % Vertices.Count; float intoLambda = (0 - depths[intoIndex]) / (depths[intoIndex2] - depths[intoIndex]); float outoLambda = (0 - depths[outoIndex]) / (depths[outoIndex2] - depths[outoIndex]); Vector2 intoVec = new Vector2(Vertices[intoIndex].X * (1 - intoLambda) + Vertices[intoIndex2].X * intoLambda, Vertices[intoIndex].Y * (1 - intoLambda) + Vertices[intoIndex2].Y * intoLambda); Vector2 outoVec = new Vector2(Vertices[outoIndex].X * (1 - outoLambda) + Vertices[outoIndex2].X * outoLambda, Vertices[outoIndex].Y * (1 - outoLambda) + Vertices[outoIndex2].Y * outoLambda); //Initialize accumulator float area = 0; Vector2 center = new Vector2(0, 0); Vector2 p2 = Vertices[intoIndex2]; const float k_inv3 = 1.0f / 3.0f; //An awkward loop from intoIndex2+1 to outIndex2 i = intoIndex2; while (i != outoIndex2) { i = (i + 1) % Vertices.Count; Vector2 p3; if (i == outoIndex2) p3 = outoVec; else p3 = Vertices[i]; //Add the triangle formed by intoVec,p2,p3 { Vector2 e1 = p2 - intoVec; Vector2 e2 = p3 - intoVec; float D = MathUtils.Cross(e1, e2); float triangleArea = 0.5f * D; area += triangleArea; // Area weighted centroid center += triangleArea * k_inv3 * (intoVec + p2 + p3); } p2 = p3; } //Normalize and transform centroid center *= 1.0f / area; sc = MathUtils.Mul(ref xf, center); return area; } public bool CompareTo(PolygonShape shape) { if (Vertices.Count != shape.Vertices.Count) return false; for (int i = 0; i < Vertices.Count; i++) { if (Vertices[i] != shape.Vertices[i]) return false; } return Radius == shape.Radius && MassData == shape.MassData; } public override Shape Clone() { PolygonShape clone = new PolygonShape(); clone.ShapeType = ShapeType; clone._radius = _radius; clone._density = _density; clone._vertices = new Vertices(_vertices); clone._normals = new 
Vertices(_normals); clone.MassData = MassData; return clone; } } }
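// ---------------------------------------------------------------------------------
// Illustrative usage sketch for the PolygonShape above: a hedged example that builds a
// 2x2 box, lets the Vertices setter run the convex hull / normal / mass computations,
// and exercises Clone() and CompareTo() from the code above. It assumes that Vertices
// (taken here from QEngine.Physics.Shared) is the List<Vector2>-style collection used
// throughout the engine (Count, the indexer and Add are used above) and that
// Settings.MaxPolygonVertices is at least 4. The PolygonShapeExamples namespace is
// hypothetical.
// ---------------------------------------------------------------------------------
using System;
using Microsoft.Xna.Framework;
using QEngine.Physics.Collision.Shapes;
using QEngine.Physics.Shared;

namespace PolygonShapeExamples
{
    internal static class PolygonShapeUsage
    {
        internal static PolygonShape CreateUnitBox()
        {
            // A 2x2 axis-aligned box centred on the origin.
            Vertices box = new Vertices(4);
            box.Add(new Vector2(-1f, -1f));
            box.Add(new Vector2(1f, -1f));
            box.Add(new Vector2(1f, 1f));
            box.Add(new Vector2(-1f, 1f));

            // Density of 1 mass-unit per unit area. Assigning the vertices (via the ctor)
            // runs the winding fix-up / gift-wrapping, computes the edge normals and fills
            // in MassData; for this box the area is 4 and the centroid sits at the origin.
            PolygonShape shape = new PolygonShape(box, 1f);

            // Clone() copies vertices, normals, radius and mass data, so CompareTo() on the
            // copy is expected to report equality.
            PolygonShape copy = (PolygonShape)shape.Clone();
            Console.WriteLine(shape.CompareTo(copy)); // expected: True

            return shape;
        }
    }
}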
//--------------------------------------------------------------------- // This file is part of the CLR Managed Debugger (mdbg) Sample. // // Copyright (C) Microsoft Corporation. All rights reserved. // // Part of managed wrappers for native debugging APIs. //--------------------------------------------------------------------- using System; using System.Collections.Generic; using System.Text; using System.Runtime.InteropServices; using System.Diagnostics; using Microsoft.Samples.Debugging.Native; using Microsoft.Samples.Debugging.Native.Private; namespace Microsoft.Samples.Debugging.Native { /// <summary> /// Base class for native debug events /// </summary> /// <remarks> /// Base class for events. /// A single process will generate a lot of debug events, so we want to keep them pretty lightweight. /// At the OS level, an event is a DEBUG_EVENT structure, which is about 0x98 bytes /// The managed overhead here is: /// - a copy of the DEBUG_EVENT, fixed up for 32/64-bit alignment /// - a backpointer to the NativePipeline object. /// - a MethodTable pointer (from being a managed object). We take advantage of this by having derived /// objects to expose debug event properties in a friendly way (especially properties requiring /// non-trivial accessing, such as getting a message string out of the debuggee). /// - m_ContinueStatus /// This gives us a rich object model over the somewhat rough model native presents. /// /// Resource management: /// - the event's ctor describes what can be done when we first get the event /// - the event's DoCleanupForContinue() method describes what has to be done when the event is continued. /// Events keep a backpointer to the pipeline, which remembers the overall state. This is important because some /// state is introduced via an Enter event (eg, LoadDll) and must be remembered and cleaned up in the corresponding /// exit event (UnloadDll) /// </remarks> public class NativeEvent { // The key data for the native event is the header and union of data. internal DebugEventHeader m_header; // Expose raw events because there's a lot of information here and we haven't yet wrapped it all.
public DebugEventUnion m_union; // Builder, returns the proper derived event object internal static NativeEvent Build( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) { NativeDbgProcess process = pipeline.GetOrCreateProcess((int)header.dwProcessId); switch (header.dwDebugEventCode) { case NativeDebugEventCode.CREATE_PROCESS_DEBUG_EVENT: return new CreateProcessDebugEvent(pipeline, ref header, ref union); case NativeDebugEventCode.EXIT_PROCESS_DEBUG_EVENT: return new ExitProcessDebugEvent(pipeline, ref header, ref union); case NativeDebugEventCode.EXCEPTION_DEBUG_EVENT: return new ExceptionNativeEvent(pipeline, ref header, ref union); case NativeDebugEventCode.LOAD_DLL_DEBUG_EVENT: return new LoadDllNativeEvent(pipeline, ref header, ref union); case NativeDebugEventCode.UNLOAD_DLL_DEBUG_EVENT: return new UnloadDllNativeEvent(pipeline, ref header, ref union); case NativeDebugEventCode.OUTPUT_DEBUG_STRING_EVENT: return new OutputDebugStringNativeEvent(pipeline, ref header, ref union); case NativeDebugEventCode.CREATE_THREAD_DEBUG_EVENT: return new CreateThreadNativeEvent(pipeline, ref header, ref union); case NativeDebugEventCode.EXIT_THREAD_DEBUG_EVENT: return new ExitThreadNativeEvent(pipeline, ref header, ref union); default: return new NativeEvent(pipeline, ref header, ref union); } } // We'd like this to be protected too internal NativeEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) { m_pipeline = pipeline; // Copy over m_header = header; m_union = union; } // This backpointer to the pipeline lets us access rich information. NativePipeline m_pipeline; /// <summary> /// Get the NativePipeline that this event came from. /// </summary> public NativePipeline Pipeline { get { return m_pipeline; } } /// <summary> /// Get the event code identifying the type of event. /// </summary> /// <remarks>This can also be obtained from the derived class.</remarks> public NativeDebugEventCode EventCode { get { return m_header.dwDebugEventCode; } } /// <summary> /// OS Thread ID of the thread that produced this debug event. /// </summary> /// <remarks>For new threads, this is the id of the new thread, and not /// the id of the thread that called CreateThread.</remarks> public int ThreadId { get { return (int)m_header.dwThreadId; } } /// <summary> /// Process ID of the event /// </summary> public int ProcessId { get { return (int)m_header.dwProcessId; } } /// <summary> /// Process helper object for this event. /// </summary> /// <exception>Throws if process is no longer available</exception> /// <remarks>A process is removed from the pipeline after continuing from the exit process event or /// after calling Detach </remarks> public NativeDbgProcess Process { get { return m_pipeline.GetProcess(ProcessId); } } public override string ToString() { return String.Format("Event Type:tid={0}, code={1}", ThreadId, EventCode); } // Only has meaning for exception events. // If this is 0, then the event has been continued. NativeMethods.ContinueStatus m_ContinueStatus = NativeMethods.ContinueStatus.DBG_EXCEPTION_NOT_HANDLED; internal NativeMethods.ContinueStatus ContinueStatus { get { return m_ContinueStatus; } set { m_ContinueStatus = value; } } /// <summary> /// Do any event cleanup that has to be done when the event is continued /// This should be called by the pipeline when the event is continued. 
/// </summary> /// <remarks>According to: /// http://msdn.microsoft.com/library/default.asp?url=/library/en-us/debug/base/waitfordebugevent.asp /// When we continue, we should: /// - for a LOAD_DLL_DEBUG_EVENT, call CloseHandle on u.LoadDll.hFile member of the DEBUG_EVENT structure. /// - for CREATE_PROCESS_DEBUG_EVENT, CloseHandle on u.CreateProcess.hFile /// - for OUTPUT_DEBUG_STRING_EVENT, Clear the exception (gh) /// /// The OS will close the handles to the hProcess and hThread objects when calling ContinueDebugEvent. /// </remarks> public virtual void DoCleanupForContinue() { // Default implementation is to do nothing. } #region Get/Set context /// <summary> /// Retrieves the Thread Context of the thread that the event occured on. /// </summary> public INativeContext GetCurrentContext() { INativeContext context = NativeContextAllocator.Allocate(); GetCurrentContext(context); return context; } /// <summary> /// copy the current context into the existing context buffer. Useful to avoid allocating a new context. /// </summary> /// <param name="context">already allocated context buffer</param> public void GetCurrentContext(INativeContext context) { Process.GetThreadContext(this.ThreadId, context); } /// <summary> /// Writes back the Thread Context of the thread that the CreateThreadNativeEvent was generated on /// </summary> /// <remarks>Setting a thread's context is very dangerous operation and must be used properly.</remarks> public void WriteContext(INativeContext context) { IntPtr hThread = IntPtr.Zero; try { hThread = NativeMethods.OpenThread(ThreadAccess.THREAD_ALL_ACCESS, true, (uint)this.ThreadId); using (IContextDirectAccessor w = context.OpenForDirectAccess()) { // context buffer is now locked NativeMethods.SetThreadContext(hThread, w.RawBuffer); } // w is disposed, this unlocks the context buffer. } finally { if (hThread != IntPtr.Zero) { NativeMethods.CloseHandle(hThread); } } } #endregion Get/Set context } /// <summary> /// Derived class for the CREATE_PROCESS_DEBUG_EVENT debug event. /// </summary> public class CreateProcessDebugEvent : NativeEvent { internal CreateProcessDebugEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { Process.InitHandle(union.CreateProcess.hProcess); // Module name of main program is unavailable. Process.AddModule(new NativeDbgModule(Process, "<main program>", union.CreateProcess.lpBaseOfImage, union.CreateProcess.hFile)); } } /// <summary> /// Derived class for EXIT_PROCESS_DEBUG_EVENT. /// </summary> /// <remarks> /// This matches the <see cref="CreateProcessDebugEvent"/> event. /// You can also wait on the process's object handle to tell if the process exited. /// </remarks> public class ExitProcessDebugEvent : NativeEvent { internal ExitProcessDebugEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { } // Called when this event is about to be continued public override void DoCleanupForContinue() { // The OS will clear the handle, so notify Process not to double-close it. Process.ClearHandle(); // Remove the process object (this will dispose it). It will also clear any remaining modules handles. 
this.Pipeline.RemoveProcess(ProcessId); } } /// <summary> /// Debug event for OUTPUT_DEBUG_STRING_EVENT, representing a log message from kernel32!OutputDebugString /// </summary> public class OutputDebugStringNativeEvent : NativeEvent { internal OutputDebugStringNativeEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { // On some platforms (Win2K), OutputDebugStrings are really exceptions that need to be cleared. this.ContinueStatus = NativeMethods.ContinueStatus.DBG_CONTINUE; } // Read Target to get the string. // No newline is appended. string m_cachedMessage; /// <summary> /// Cache and read the log message from the target. /// </summary> /// <returns></returns> public string ReadMessage() { if (m_cachedMessage == null) { m_cachedMessage = m_union.OutputDebugString.ReadMessageFromTarget(this.Process); } return m_cachedMessage; } public override string ToString() { return String.Format("OutputDebugString:tid={0}, message={1}", ThreadId, ReadMessage()); } } /// <summary> /// Base class for Dll load and unload events /// </summary> public abstract class DllBaseNativeEvent : NativeEvent { internal DllBaseNativeEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { } /// <summary> /// Get the native module associated with this event. /// </summary> public NativeDbgModule Module { get { return Process.LookupModule(BaseAddress); } } /// <summary> /// Get the base address of the module. This is a unique identifier. /// </summary> abstract public IntPtr BaseAddress { get; } } /// <summary> /// Debug event for LOAD_DLL_DEBUG_EVENT. /// </summary> public class LoadDllNativeEvent : DllBaseNativeEvent { internal LoadDllNativeEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { Process.AddModule(new NativeDbgModule(Process, ReadImageName(), BaseAddressWorker, union.LoadDll.hFile)); } /// <summary> /// Non-virtual accessor, so it's safe to use in the ctor. /// </summary> protected IntPtr BaseAddressWorker { get { return m_union.LoadDll.lpBaseOfDll; } } /// <summary> /// Base address of the dll. This can be used to uniquely identify the dll within a process and match load and unload events. /// </summary> public override IntPtr BaseAddress { get { return BaseAddressWorker; } } string m_cachedImageName; /// <summary> /// Get the name of the dll if available. /// </summary> /// <returns>full string name of dll if available</returns> /// <remarks>This must read from the target. The value is cached. </remarks> public string ReadImageName() { if (m_cachedImageName == null) { m_cachedImageName = m_union.LoadDll.ReadImageNameFromTarget(this.Process); // this serves two purposes. // - it gives us a more descriptive name than just null. // - it conveniently sets m_cachedImageName to a non-null value so that we don't keep // trying to read the name in the failure case. if (m_cachedImageName == null) { m_cachedImageName = "(unknown)"; } } return m_cachedImageName; } public override string ToString() { string name = ReadImageName(); return String.Format("DLL Load:Address 0x{0}, {1}", BaseAddress.ToString("x"), name); } } /// <summary> /// Debug event for UNLOAD_DLL_DEBUG_EVENT. 
/// </summary> public class UnloadDllNativeEvent : DllBaseNativeEvent { internal UnloadDllNativeEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { } /// <summary> /// BaseAddress of module. Matches BaseAddress from the LoadDllNativeEvent. /// </summary> public override IntPtr BaseAddress { get { return m_union.UnloadDll.lpBaseOfDll; } } public override string ToString() { NativeDbgModule module = Module; string name = (module == null) ? "unknown" : Module.Name; return String.Format("DLL unload:Address 0x{0},{1}", BaseAddress.ToString("x"), name); } public override void DoCleanupForContinue() { // For native dlls, need to free the module handle. // If there's no matching Load dll event, then module will be null and we can't do anything. NativeDbgModule module = this.Module; if (module != null) { module.CloseHandle(); Process.RemoveModule(module.BaseAddress); } } } /// <summary> /// Debug event for native thread create. /// </summary> public class CreateThreadNativeEvent : NativeEvent { internal CreateThreadNativeEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { // OS will close the thread handle when the ExitThread event is processed. } } /// <summary> /// Debug event for native thread exit. /// </summary> public class ExitThreadNativeEvent : NativeEvent { internal ExitThreadNativeEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { } /// <summary> /// Get the exit code of the thread. /// </summary> public int ExitCode { get { return unchecked((int)m_union.ExitThread.dwExitCode); } } } /// <summary> /// Represent an exception debug event /// </summary> public class ExceptionNativeEvent : NativeEvent { internal ExceptionNativeEvent( NativePipeline pipeline, ref DebugEventHeader header, ref DebugEventUnion union ) : base(pipeline, ref header, ref union) { } /// <summary> /// Get the exception code identifying the type of exception. /// </summary> public ExceptionCode ExceptionCode { get { return (ExceptionCode)m_union.Exception.ExceptionRecord.ExceptionCode; } } /// <summary> /// Is the exception first-chance or unhandled? /// </summary> public bool FirstChance { get { return m_union.Exception.dwFirstChance != 0; } } /// <summary> /// The address of the exception. /// For hardware exceptions, this is the address of the instruction that generated the fault. /// For software exceptions, this is the address in the OS that raised the exception /// (typically in kernel32!RaiseException) /// </summary> public IntPtr Address { get { return m_union.Exception.ExceptionRecord.ExceptionAddress; } } /// <summary> /// Clears the exception (continue as "gh"). This is an invasive operation that may change /// the debuggee's behavior. /// </summary> public void ClearException() { ContinueStatus = NativeMethods.ContinueStatus.DBG_CONTINUE; } public override string ToString() { // If we recognize the exception code, we want to print out the pretty value // {0} - if recognized, prints name, else prints in Decimal. // {0:x} - always prints in hex string val = String.Format("Exception Event:Tid={3}, 0x{0:x}, {1}, address=0x{2}", ExceptionCode, (FirstChance ? "first chance" : "unhandled"), Address.ToString("x"), ThreadId ); return val; } } } // namespace Microsoft.Samples.Debugging.Native
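// Illustrative sketch (not part of the mdbg sample itself): shows how a debugger loop
// might inspect the derived event classes defined above. It uses only members declared
// in this file (EventCode, ThreadId, ReadMessage, ReadImageName, ExceptionCode, etc.).
// How events are obtained and continued depends on NativePipeline, which lives outside
// this file, so that part is deliberately left out.
using Microsoft.Samples.Debugging.Native;

namespace Microsoft.Samples.Debugging.NativeEventSketch
{
    static class NativeEventDescriber
    {
        // Produce a one-line description of a debug event, suitable for logging.
        internal static string Describe(NativeEvent ev)
        {
            switch (ev.EventCode)
            {
                case NativeDebugEventCode.OUTPUT_DEBUG_STRING_EVENT:
                    // ReadMessage() reads (and caches) the string from the debuggee.
                    return "log: " + ((OutputDebugStringNativeEvent)ev).ReadMessage();

                case NativeDebugEventCode.LOAD_DLL_DEBUG_EVENT:
                    var load = (LoadDllNativeEvent)ev;
                    return "dll load: " + load.ReadImageName() + " @ 0x" + load.BaseAddress.ToString("x");

                case NativeDebugEventCode.EXCEPTION_DEBUG_EVENT:
                    var ex = (ExceptionNativeEvent)ev;
                    return (ex.FirstChance ? "first-chance " : "unhandled ") + ex.ExceptionCode +
                        " at 0x" + ex.Address.ToString("x") + " on tid " + ex.ThreadId;

                case NativeDebugEventCode.EXIT_THREAD_DEBUG_EVENT:
                    return "thread exit, code=" + ((ExitThreadNativeEvent)ev).ExitCode;

                default:
                    // The base class ToString() already prints the thread id and event code.
                    return ev.ToString();
            }
        }
    }
}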
// // Image.cs // // Author: // Lluis Sanchez <[email protected]> // // Copyright (c) 2011 Xamarin Inc // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. using System; using System.Linq; using Xwt.Backends; using System.Reflection; using System.IO; using System.Collections.Generic; namespace Xwt.Drawing { public class Image: XwtObject, IDisposable { Size requestedSize; internal NativeImageRef NativeRef; internal double requestedAlpha = 1; static int[] supportedScales = { 2 }; internal Image () { } internal Image (object backend): base (backend) { Init (); } internal Image (object backend, Toolkit toolkit): base (backend, toolkit) { Init (); } /// <summary> /// Creates a new image that is a copy of another image /// </summary> /// <param name="image">Image.</param> public Image (Image image): base (image.Backend, image.ToolkitEngine) { NativeRef = image.NativeRef; Init (); } internal void Init () { if (NativeRef == null) { NativeRef = new NativeImageRef (Backend, ToolkitEngine); } else NativeRef.AddReference (); } ~Image () { Dispose (false); } public void Dispose () { Dispose (true); GC.SuppressFinalize (this); } protected virtual void Dispose (bool disposing) { if (NativeRef != null) NativeRef.ReleaseReference (disposing); } internal ImageDescription GetImageDescription (Toolkit toolkit) { InitForToolkit (toolkit); return new ImageDescription () { Alpha = requestedAlpha, Size = Size, Backend = Backend }; } internal void InitForToolkit (Toolkit t) { if (ToolkitEngine != t) { var nr = NativeRef.LoadForToolkit (t); ToolkitEngine = t; Backend = nr.Backend; } } /// <summary> /// Loads an image from a resource /// </summary> /// <returns>An image</returns> /// <param name="resource">Resource name</param> /// <remarks> /// This method will look for alternative versions of the image with different resolutions. /// For example, if a resource is named "foo.png", this method will load /// other resources with the name "foo@XXX.png", where XXX can be any arbitrary string. For example "foo@2x.png". /// Each of those resources will be considered different versions of the same image.
/// </remarks> public static Image FromResource (string resource) { if (resource == null) throw new ArgumentNullException ("resource"); return FromResource (Assembly.GetCallingAssembly (), resource); } /// <summary> /// Loads an image from a resource /// </summary> /// <returns>An image</returns> /// <param name="type">Type which identifies the assembly from which to load the image</param> /// <param name="resource">Resource name</param> /// <remarks> /// This method will look for alternative versions of the image with different resolutions. /// For example, if a resource is named "foo.png", this method will load /// other resources with the name "foo@XXX.png", where XXX can be any arbitrary string. For example "foo@2x.png". /// Each of those resources will be considered different versions of the same image. /// </remarks> public static Image FromResource (Type type, string resource) { if (type == null) throw new ArgumentNullException ("type"); if (resource == null) throw new ArgumentNullException ("resource"); return FromResource (type.Assembly, resource); } /// <summary> /// Loads an image from a resource /// </summary> /// <returns>An image</returns> /// <param name="assembly">The assembly from which to load the image</param> /// <param name="resource">Resource name</param> /// <remarks> /// This method will look for alternative versions of the image with different resolutions. /// For example, if a resource is named "foo.png", this method will load /// other resources with the name "foo@XXX.png", where XXX can be any arbitrary string. For example "foo@2x.png". /// Each of those resources will be considered different versions of the same image. /// </remarks> public static Image FromResource (Assembly assembly, string resource) { if (assembly == null) throw new ArgumentNullException ("assembly"); if (resource == null) throw new ArgumentNullException ("resource"); var toolkit = Toolkit.CurrentEngine; if (toolkit == null) throw new ToolkitNotInitializedException (); var name = Path.GetFileNameWithoutExtension (resource); var img = toolkit.ImageBackendHandler.LoadFromResource (assembly, resource); if (img == null) throw new InvalidOperationException ("Resource not found: " + resource); var reqSize = toolkit.ImageBackendHandler.GetSize (img); var ext = GetExtension (resource); var altImages = new List<Tuple<string,object>> (); foreach (var r in assembly.GetManifestResourceNames ()) { int i = r.LastIndexOf ('@'); if (i != -1) { string rname = r.Substring (0, i); if (rname == resource || rname == name) { var rim = toolkit.ImageBackendHandler.LoadFromResource (assembly, r); if (rim != null) altImages.Add (new Tuple<string, object> (r, rim)); } } } if (altImages.Count > 0) { altImages.Insert (0, new Tuple<string, object> (resource, img)); if (ext == ".9.png") return CreateComposedNinePatch (toolkit, altImages); img = toolkit.ImageBackendHandler.CreateMultiResolutionImage (altImages.Select (i => i.Item2)); } var res = new Image (img, toolkit) { requestedSize = reqSize }; res.NativeRef.SetResourceSource (assembly, resource); if (ext == ".9.png") res = new NinePatchImage (res.ToBitmap ()); return res; } public static Image CreateMultiSizeIcon (IEnumerable<Image> images) { if (Toolkit.CurrentEngine == null) throw new ToolkitNotInitializedException (); var allImages = images.ToArray (); var img = new Image (Toolkit.CurrentEngine.ImageBackendHandler.CreateMultiSizeIcon (allImages.Select (i => i.GetBackend ()))); if (allImages.All (i => i.NativeRef.HasNativeSource)) { var
sources = allImages.Select (i => i.NativeRef.NativeSource).ToArray (); img.NativeRef.SetSources (sources); } return img; } public static Image CreateMultiResolutionImage (IEnumerable<Image> images) { if (Toolkit.CurrentEngine == null) throw new ToolkitNotInitializedException (); return new Image (Toolkit.CurrentEngine.ImageBackendHandler.CreateMultiResolutionImage (images.Select (i => i.GetBackend ()))); } public static Image FromFile (string file) { var toolkit = Toolkit.CurrentEngine; if (toolkit == null) throw new ToolkitNotInitializedException (); var ext = GetExtension (file); var img = toolkit.ImageBackendHandler.LoadFromFile (file); List<Tuple<string,object>> altImages = null; foreach (var s in supportedScales) { var fn = file.Substring (0, file.Length - ext.Length) + "@" + s + "x" + ext; if (File.Exists (fn)) { if (altImages == null) { altImages = new List<Tuple<string, object>> (); altImages.Add (new Tuple<string, object> (file, img)); } altImages.Add (new Tuple<string, object> (fn, toolkit.ImageBackendHandler.LoadFromFile (fn))); } } if (altImages != null) { if (ext == ".9.png") return CreateComposedNinePatch (toolkit, altImages); img = toolkit.ImageBackendHandler.CreateMultiResolutionImage (altImages.Select (i => i.Item2)); } var res = new Image (img, toolkit); if (ext == ".9.png") res = new NinePatchImage (res.ToBitmap ()); return res; } static Image CreateComposedNinePatch (Toolkit toolkit, List<Tuple<string,object>> altImages) { var npImage = new NinePatchImage (); foreach (var fi in altImages) { int i = fi.Item1.LastIndexOf ('@'); double scaleFactor; if (i == -1) scaleFactor = 1; else { int j = fi.Item1.IndexOf ('x', ++i); if (!double.TryParse (fi.Item1.Substring (i, j - i), out scaleFactor)) { toolkit.ImageBackendHandler.Dispose (fi.Item2); continue; } } npImage.AddFrame (new Image (fi.Item2, toolkit).ToBitmap (), scaleFactor); } return npImage; } public static Image FromStream (Stream stream) { var toolkit = Toolkit.CurrentEngine; if (toolkit == null) throw new ToolkitNotInitializedException (); return new Image (toolkit.ImageBackendHandler.LoadFromStream (stream), toolkit); } static string GetExtension (string fileName) { if (fileName.EndsWith (".9.png", StringComparison.Ordinal)) return ".9.png"; else return Path.GetExtension (fileName); } public void Save (string file, ImageFileType fileType) { using (var f = File.OpenWrite (file)) Save (f, fileType); } public void Save (Stream stream, ImageFileType fileType) { ToolkitEngine.ImageBackendHandler.SaveToStream (ToBitmap ().Backend, stream, fileType); } /// <summary> /// Gets a value indicating whether this image has fixed size. /// </summary> /// <value><c>true</c> if this image has fixed size; otherwise, <c>false</c>.</value> /// <remarks> /// Some kinds of images such as vector images or multiple-size icons don't have a fixed size, /// and a specific size has to be chosen before they can be used. A size can be chosen by using /// the WithSize method. /// </remarks> public bool HasFixedSize { get { return !Size.IsZero; } } /// <summary> /// Gets the size of the image /// </summary> /// <value>The size of the image, or Size.Zero if the image doesn't have an intrinsic size</value> public Size Size { get { return !requestedSize.IsZero ? 
requestedSize : GetDefaultSize (); } internal set { requestedSize = value; } } /// <summary> /// Gets the width of the image /// </summary> /// <value>The width.</value> public double Width { get { return Size.Width; } } /// <summary> /// Gets the height of the image /// </summary> /// <value>The height.</value> public double Height { get { return Size.Height; } } /// <summary> /// Applies an alpha filter to the image /// </summary> /// <returns>A new image with the alpha filter applied</returns> /// <param name="alpha">Alpha to apply</param> /// <remarks>This is a lightweight operation. The alpha filter is applied when the image is rendered. /// The method doesn't make a copy of the image data.</remarks> public Image WithAlpha (double alpha) { return new Image (this) { requestedSize = requestedSize, requestedAlpha = alpha }; } /// <summary> /// Retuns a copy of the image with a specific size /// </summary> /// <returns>A new image with the new size</returns> /// <param name="width">Width.</param> /// <param name="height">Height.</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. /// </remarks> public Image WithSize (double width, double height) { return new Image (this) { requestedSize = new Size (width, height) }; } /// <summary> /// Retuns a copy of the image with a specific size /// </summary> /// <returns>A new image with the new size</returns> /// <param name="size">The size.</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. /// </remarks> public Image WithSize (Size size) { return new Image (this) { requestedSize = size }; } /// <summary> /// Retuns a copy of the image with a specific size /// </summary> /// <returns>A new image with the new size</returns> /// <param name="squaredSize">Width and height of the image (the image is expected to be squared)</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. /// </remarks> public Image WithSize (double squaredSize) { return new Image (this) { requestedSize = new Size (squaredSize, squaredSize) }; } /// <summary> /// Retuns a copy of the image with a specific size /// </summary> /// <returns>A new image with the new size</returns> /// <param name="size">New size</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. /// </remarks> public Image WithSize (IconSize size) { Size s; switch (size) { case IconSize.Small: s = new Size (16, 16); break; case IconSize.Medium: s = new Size (24, 24); break; case IconSize.Large: s = new Size (32, 32); break; default: throw new ArgumentOutOfRangeException ("size"); } return new Image (this) { requestedSize = s }; } internal Size GetFixedSize () { var size = Size; if (size.IsZero) throw new InvalidOperationException ("Image size has not been set and the image doesn't have a default size"); return size; } /// <summary> /// Retuns a copy of the image with a size that fits the provided size limits /// </summary> /// <returns>The image</returns> /// <param name="maxWidth">Max width.</param> /// <param name="maxHeight">Max height.</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. 
/// </remarks> public Image WithBoxSize (double maxWidth, double maxHeight) { var size = GetFixedSize (); var ratio = Math.Min (maxWidth / size.Width, maxHeight / size.Height); return new Image (this) { requestedSize = new Size (size.Width * ratio, size.Height * ratio) }; } /// <summary> /// Retuns a copy of the image with a size that fits the provided size limits /// </summary> /// <returns>The image</returns> /// <param name="maxSize">Max width and height (the image is expected to be squared)</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. /// </remarks> public Image WithBoxSize (double maxSize) { return WithBoxSize (maxSize, maxSize); } /// <summary> /// Retuns a copy of the image with a size that fits the provided size limits /// </summary> /// <returns>The image</returns> /// <param name="size">Max width and height</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. /// </remarks> public Image WithBoxSize (Size size) { return WithBoxSize (size.Width, size.Height); } /// <summary> /// Retuns a scaled copy of the image /// </summary> /// <returns>The image</returns> /// <param name="scale">Scale to apply to the image size</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. /// </remarks> public Image Scale (double scale) { if (!HasFixedSize) throw new InvalidOperationException ("Image must have a size in order to be scaled"); double w = Size.Width * scale; double h = Size.Height * scale; return new Image (this) { requestedSize = new Size (w, h) }; } /// <summary> /// Retuns a scaled copy of the image /// </summary> /// <returns>The image</returns> /// <param name="scaleX">Scale to apply to the width of the image</param> /// <param name="scaleY">Scale to apply to the height of the image</param> /// <remarks> /// This is a lightweight operation. The image is scaled when it is rendered. /// The method doesn't make a copy of the image data. 
/// </remarks> public Image Scale (double scaleX, double scaleY) { if (!HasFixedSize) throw new InvalidOperationException ("Image must have a size in order to be scaled"); double w = Size.Width * scaleX; double h = Size.Height * scaleY; return new Image (this) { requestedSize = new Size (w, h) }; } /// <summary> /// Converts the image to a bitmap /// </summary> /// <returns>The bitmap.</returns> /// <param name="format">Bitmap format</param> public BitmapImage ToBitmap (ImageFormat format = ImageFormat.ARGB32) { return ToBitmap (1d); } /// <summary> /// Converts the image to a bitmap /// </summary> /// <returns>The bitmap.</returns> /// <param name="renderTarget">Widget to be used as reference for determining the resolution of the bitmap</param> /// <param name="format">Bitmap format</param> public BitmapImage ToBitmap (Widget renderTarget, ImageFormat format = ImageFormat.ARGB32) { if (renderTarget.ParentWindow == null) throw new InvalidOperationException ("renderTarget is not bound to a window"); return ToBitmap (renderTarget.ParentWindow.Screen.ScaleFactor, format); } /// <summary> /// Converts the image to a bitmap /// </summary> /// <returns>The bitmap.</returns> /// <param name="renderTarget">Window to be used as reference for determining the resolution of the bitmap</param> /// <param name="format">Bitmap format</param> public BitmapImage ToBitmap (WindowFrame renderTarget, ImageFormat format = ImageFormat.ARGB32) { return ToBitmap (renderTarget.Screen.ScaleFactor, format); } /// <summary> /// Converts the image to a bitmap /// </summary> /// <returns>The bitmap.</returns> /// <param name="renderTarget">Screen to be used as reference for determining the resolution of the bitmap</param> /// <param name="format">Bitmap format</param> public BitmapImage ToBitmap (Screen renderTarget, ImageFormat format = ImageFormat.ARGB32) { return ToBitmap (renderTarget.ScaleFactor, format); } /// <summary> /// Converts the image to a bitmap /// </summary> /// <returns>The bitmap.</returns> /// <param name="scaleFactor">Scale factor of the bitmap</param> /// <param name="format">Bitmap format</param> public BitmapImage ToBitmap (double scaleFactor, ImageFormat format = ImageFormat.ARGB32) { var s = GetFixedSize (); var bmp = ToolkitEngine.ImageBackendHandler.ConvertToBitmap (Backend, s.Width, s.Height, scaleFactor, format); return new BitmapImage (bmp, s, ToolkitEngine); } protected virtual Size GetDefaultSize () { return ToolkitEngine.ImageBackendHandler.GetSize (Backend); } } class NativeImageRef { object backend; int referenceCount = 1; Toolkit toolkit; NativeImageSource[] sources; public struct NativeImageSource { // Source file or resource name public string Source; // Assembly that contains the resource public Assembly ResourceAssembly; public Func<Stream[]> ImageLoader; public ImageDrawCallback DrawCallback; } public object Backend { get { return backend; } } public Toolkit Toolkit { get { return toolkit; } } public NativeImageSource NativeSource { get { return sources[0]; } } public bool HasNativeSource { get { return sources != null; } } public void SetSources (NativeImageSource[] sources) { this.sources = sources; } public void SetFileSource (string file) { sources = new [] { new NativeImageSource { Source = file, } }; } public void SetResourceSource (Assembly asm, string name) { sources = new [] { new NativeImageSource { Source = name, ResourceAssembly = asm } }; } public void SetStreamSource (Func<Stream[]> imageLoader) { sources = new [] { new NativeImageSource { ImageLoader = 
imageLoader } }; } public void SetCustomDrawSource (ImageDrawCallback drawCallback) { sources = new [] { new NativeImageSource { DrawCallback = drawCallback } }; } public int ReferenceCount { get { return referenceCount; } } public NativeImageRef (object backend, Toolkit toolkit) { this.backend = backend; this.toolkit = toolkit; NextRef = this; if (toolkit.ImageBackendHandler.DisposeHandleOnUiThread) ResourceManager.RegisterResource (backend, toolkit.ImageBackendHandler.Dispose); } public NativeImageRef LoadForToolkit (Toolkit targetToolkit) { NativeImageRef newRef = null; var r = NextRef; while (r != this) { if (r.toolkit == targetToolkit) { newRef = r; break; } r = r.NextRef; } if (newRef != null) return newRef; object newBackend; if (sources != null) { var frames = new List<object> (); foreach (var s in sources) { if (s.ImageLoader != null) { var streams = s.ImageLoader (); try { if (streams.Length == 1) { newBackend = targetToolkit.ImageBackendHandler.LoadFromStream (streams [0]); } else { var backends = new object[streams.Length]; for (int n = 0; n < backends.Length; n++) { backends [n] = targetToolkit.ImageBackendHandler.LoadFromStream (streams [n]); } newBackend = targetToolkit.ImageBackendHandler.CreateMultiResolutionImage (backends); } } finally { foreach (var st in streams) st.Dispose (); } } else if (s.ResourceAssembly != null) newBackend = targetToolkit.ImageBackendHandler.LoadFromResource (s.ResourceAssembly, s.Source); else if (s.Source != null) newBackend = targetToolkit.ImageBackendHandler.LoadFromFile (s.Source); else if (s.DrawCallback != null) newBackend = targetToolkit.ImageBackendHandler.CreateCustomDrawn (s.DrawCallback); else throw new NotSupportedException (); frames.Add (newBackend); } newBackend = targetToolkit.ImageBackendHandler.CreateMultiSizeIcon (frames); } else { using (var s = new MemoryStream ()) { toolkit.ImageBackendHandler.SaveToStream (backend, s, ImageFileType.Png); s.Position = 0; newBackend = targetToolkit.ImageBackendHandler.LoadFromStream (s); } } newRef = new NativeImageRef (newBackend, targetToolkit); newRef.NextRef = NextRef; NextRef = newRef; return newRef; } public void AddReference () { System.Threading.Interlocked.Increment (ref referenceCount); } public void ReleaseReference (bool disposing) { if (System.Threading.Interlocked.Decrement (ref referenceCount) == 0) { if (disposing) { if (toolkit.ImageBackendHandler.DisposeHandleOnUiThread) ResourceManager.FreeResource (backend); else toolkit.ImageBackendHandler.Dispose (backend); } else ResourceManager.FreeResource (backend); } } /// <summary> /// Reference to the next native image, for a different toolkit /// </summary> public NativeImageRef NextRef { get; set; } } }
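// Illustrative usage sketch (not part of Xwt itself): demonstrates the lightweight
// sizing helpers declared above. Every call below returns a new Image that shares the
// same native backend through NativeImageRef; the requested size/alpha is only applied
// when the image is rendered, so no pixel data is copied. "toolbar-icon.png" is a
// placeholder file name, and a toolkit must already be initialized, otherwise
// FromFile throws ToolkitNotInitializedException.
using Xwt.Drawing;

static class ImageUsageSketch
{
	public static Image LoadScaledIcon ()
	{
		// If "toolbar-icon@2x.png" exists next to the base file, FromFile picks it up
		// automatically (see supportedScales above) and uses it on high-resolution screens.
		Image img = Image.FromFile ("toolbar-icon.png");

		Image faded = img.WithAlpha (0.5);       // rendered at 50% opacity
		Image fixedSize = img.WithSize (24, 24); // rendered at 24x24 points
		Image boxed = img.WithBoxSize (48);      // scaled to fit a 48x48 box, aspect ratio preserved

		return boxed;
	}
}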
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for Additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ namespace NPOI.HSLF.Record { using System; using System.IO; using NPOI.Util; using System.Text; /** * Ruler of a text as it differs from the style's ruler Settings. * * @author Yegor Kozlov */ public class TextRulerAtom : RecordAtom { /** * Record header. */ private byte[] _header; /** * Record data. */ private byte[] _data; //ruler internals private int defaultTabSize; private int numLevels; private int[] tabStops; private int[] bulletOffSets = new int[5]; private int[] textOffSets = new int[5]; /** * Constructs a new empty ruler atom. */ public TextRulerAtom() { _header = new byte[8]; _data = new byte[0]; LittleEndian.PutShort(_header, 2, (short)RecordType); LittleEndian.PutInt(_header, 4, _data.Length); } /** * Constructs the ruler atom record from its * source data. * * @param source the source data as a byte array. * @param start the start offset into the byte array. * @param len the length of the slice in the byte array. */ protected TextRulerAtom(byte[] source, int start, int len) { // Get the header. _header = new byte[8]; Array.Copy(source, start, _header, 0, 8); // Get the record data. _data = new byte[len - 8]; Array.Copy(source, start + 8, _data, 0, len - 8); Read(); } /** * Gets the record type. * * @return the record type. */ public override long RecordType { get { return RecordTypes.TextRulerAtom.typeID; } } /** * Write the contents of the record back, so it can be written * to disk. * * @param out the output stream to write to. * @throws java.io.IOException if an error occurs. 
*/ public override void WriteOut(Stream out1) { out1.Write(_header, 0, _header.Length); out1.Write(_data, 0, _data.Length); } /** * Read the record bytes and initialize the internal variables */ private void Read() { int pos = 0; short mask = LittleEndian.GetShort(_data); pos += 4; short val; int[] bits = { 1, 0, 2, 3, 8, 4, 9, 5, 10, 6, 11, 7, 12 }; for (int i = 0; i < bits.Length; i++) { if ((mask & 1 << bits[i]) != 0) { switch (bits[i]) { case 0: //defaultTabSize defaultTabSize = LittleEndian.GetShort(_data, pos); pos += 2; break; case 1: //numLevels numLevels = LittleEndian.GetShort(_data, pos); pos += 2; break; case 2: //tabStops val = LittleEndian.GetShort(_data, pos); pos += 2; tabStops = new int[val * 2]; for (int j = 0; j < tabStops.Length; j++) { tabStops[j] = LittleEndian.GetUShort(_data, pos); pos += 2; } break; case 3: case 4: case 5: case 6: case 7: //bullet.offset val = LittleEndian.GetShort(_data, pos); pos += 2; bulletOffSets[bits[i] - 3] = val; break; case 8: case 9: case 10: case 11: case 12: //text.offset val = LittleEndian.GetShort(_data, pos); pos += 2; textOffSets[bits[i] - 8] = val; break; } } } } /** * Default distance between tab stops, in master coordinates (576 dpi). */ public int GetDefaultTabSize() { return defaultTabSize; } /** * Number of indent levels (maximum 5). */ public int GetNumberOfLevels() { return numLevels; } /** * Tab stop positions, in master coordinates (576 dpi). */ public int[] GetTabStops() { return tabStops; } /** * Paragraph's distance from shape's left margin, in master coordinates (576 dpi). */ public int[] GetTextOffSets() { return textOffSets; } /** * First line of paragraph's distance from shape's left margin, in master coordinates (576 dpi). */ public int[] GetBulletOffSets() { return bulletOffSets; } public static TextRulerAtom GetParagraphInstance() { byte[] data = new byte[] { 0x00, 0x00, (byte)0xA6, 0x0F, 0x0A, 0x00, 0x00, 0x00, 0x10, 0x03, 0x00, 0x00, (byte)0xF9, 0x00, 0x41, 0x01, 0x41, 0x01 }; TextRulerAtom ruler = new TextRulerAtom(data, 0, data.Length); return ruler; } public void SetParagraphIndent(short textOffSet, short bulletOffSet) { LittleEndian.PutShort(_data, 4, textOffSet); LittleEndian.PutShort(_data, 6, bulletOffSet); LittleEndian.PutShort(_data, 8, bulletOffSet); } } }
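/*
 * Illustrative usage sketch (not part of NPOI): shows how the canned paragraph ruler
 * returned by GetParagraphInstance() can be adjusted and serialized. The offsets 576
 * and 288 are arbitrary sample values in master coordinates (576 dpi), i.e. one inch
 * and half an inch; the namespace and class names are only for illustration.
 */
namespace NPOI.HSLF.Record.Sketch
{
    using System.IO;

    public static class TextRulerAtomSketch
    {
        public static byte[] BuildIndentedRuler()
        {
            TextRulerAtom ruler = TextRulerAtom.GetParagraphInstance();

            // First argument is the paragraph's text offset, second is the bullet
            // (first line) offset, both measured from the shape's left margin.
            ruler.SetParagraphIndent(576, 288);

            using (MemoryStream ms = new MemoryStream())
            {
                ruler.WriteOut(ms);   // record header followed by the ruler data
                return ms.ToArray();
            }
        }
    }
}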
#region License /* * Copyright 2002-2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #endregion #region Imports using System; using System.Collections; using Spring.Objects.Factory.Config; #endregion namespace Spring.Objects.Factory.Support { /// <summary> /// Programmatic means of constructing a <see cref="IObjectDefinition"/> using the builder pattern. Intended primarily /// for use when implementing custom namespace parsers. /// </summary> /// <remarks>Set methods are used instead of properties, so that chaining of methods can be used to create /// 'one-liner'definitions that set multiple properties at one.</remarks> /// <author>Rod Johnson</author> /// <author>Rob Harrop</author> /// <author>Juergen Hoeller</author> /// <author>Mark Pollack (.NET)</author> public class ObjectDefinitionBuilder { #region Fields private AbstractObjectDefinition objectDefinition; private IObjectDefinitionFactory objectDefinitionFactory; private int constructorArgIndex; #endregion #region Constructor(s) /// <summary> /// Initializes a new instance of the <see cref="ObjectDefinitionBuilder"/> class, private /// to force use of factory methods. /// </summary> private ObjectDefinitionBuilder() { } #endregion #region Factory Methods /// <summary> /// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>. /// </summary> public static ObjectDefinitionBuilder GenericObjectDefinition() { ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder(); builder.objectDefinition = new GenericObjectDefinition(); return builder; } /// <summary> /// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>. /// </summary> /// <param name="objectType">the <see cref="Type"/> of the object that the definition is being created for</param> public static ObjectDefinitionBuilder GenericObjectDefinition(Type objectType) { ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder(); builder.objectDefinition = new GenericObjectDefinition(); builder.objectDefinition.ObjectType = objectType; return builder; } /// <summary> /// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>. /// </summary> /// <param name="objectTypeName">the name of the <see cref="Type"/> of the object that the definition is being created for</param> public static ObjectDefinitionBuilder GenericObjectDefinition(string objectTypeName) { ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder(); builder.objectDefinition = new GenericObjectDefinition(); builder.objectDefinition.ObjectTypeName = objectTypeName; return builder; } /// <summary> /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition. 
/// </summary> /// <param name="objectDefinitionFactory">The object definition factory.</param> /// <param name="objectTypeName">The type name of the object.</param> /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns> public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory, string objectTypeName) { return RootObjectDefinition(objectDefinitionFactory, objectTypeName, null); } /// <summary> /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition. /// </summary> /// <param name="objectDefinitionFactory">The object definition factory.</param> /// <param name="objectTypeName">Name of the object type.</param> /// <param name="factoryMethodName">Name of the factory method.</param> /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns> public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory, string objectTypeName, string factoryMethodName) { ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder(); builder.objectDefinitionFactory = objectDefinitionFactory; // Pass in null for parent name and also AppDomain to force object definition to be register by name and not type. builder.objectDefinition = objectDefinitionFactory.CreateObjectDefinition(objectTypeName, null, null); builder.objectDefinition.FactoryMethodName = factoryMethodName; return builder; } /// <summary> /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition. /// </summary> /// <param name="objectDefinitionFactory">The object definition factory.</param> /// <param name="objectType">Type of the object.</param> /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns> public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory, Type objectType) { return RootObjectDefinition(objectDefinitionFactory, objectType, null); } /// <summary> /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition. /// </summary> /// <param name="objectDefinitionFactory">The object definition factory.</param> /// <param name="objectType">Type of the object.</param> /// <param name="factoryMethodName">Name of the factory method.</param> /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns> public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory, Type objectType, string factoryMethodName) { ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder(); builder.objectDefinitionFactory = objectDefinitionFactory; builder.objectDefinition = objectDefinitionFactory.CreateObjectDefinition(objectType.FullName, null, AppDomain.CurrentDomain); builder.objectDefinition.ObjectType = objectType; builder.objectDefinition.FactoryMethodName = factoryMethodName; return builder; } /// <summary> /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a child object definition.. 
/// </summary> /// <param name="objectDefinitionFactory">The object definition factory.</param> /// <param name="parentObjectName">Name of the parent object.</param> /// <returns></returns> public static ObjectDefinitionBuilder ChildObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory, string parentObjectName) { ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder(); builder.objectDefinitionFactory = objectDefinitionFactory; builder.objectDefinition = objectDefinitionFactory.CreateObjectDefinition(null, parentObjectName, AppDomain.CurrentDomain); return builder; } #endregion #region Properties /// <summary> /// Gets the current object definition in its raw (unvalidated) form. /// </summary> /// <value>The raw object definition.</value> public AbstractObjectDefinition RawObjectDefinition { get { return objectDefinition; } } /// <summary> /// Validate and gets the object definition. /// </summary> /// <value>The object definition.</value> public AbstractObjectDefinition ObjectDefinition { get { objectDefinition.Validate(); return objectDefinition; } } #endregion #region Methods //TODO add expression support. /// <summary> /// Adds the property value under the given name. /// </summary> /// <param name="name">The name.</param> /// <param name="value">The value.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder AddPropertyValue(string name, object value) { objectDefinition.PropertyValues.Add(new PropertyValue(name, value)); return this; } /// <summary> /// Adds a reference to the specified object name under the property specified. /// </summary> /// <param name="name">The name.</param> /// <param name="objectName">Name of the object.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder AddPropertyReference(string name, string objectName) { objectDefinition.PropertyValues.Add(new PropertyValue(name, new RuntimeObjectReference(objectName))); return this; } /// <summary> /// Adds an index constructor arg value. The current index is tracked internally and all addtions are /// at the present point /// </summary> /// <param name="value">The constructor arg value.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder AddConstructorArg(object value) { objectDefinition.ConstructorArgumentValues.AddIndexedArgumentValue(constructorArgIndex++,value); return this; } /// <summary> /// Adds a reference to the named object as a constructor argument. /// </summary> /// <param name="objectName">Name of the object.</param> /// <returns></returns> public ObjectDefinitionBuilder AddConstructorArgReference(string objectName) { return AddConstructorArg(new RuntimeObjectReference(objectName)); } /// <summary> /// Sets the name of the factory method to use for this definition. /// </summary> /// <param name="factoryMethod">The factory method.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetFactoryMethod(string factoryMethod) { objectDefinition.FactoryMethodName = factoryMethod; return this; } /// <summary> /// Sets the name of the factory object to use for this definition. 
/// </summary> /// <param name="factoryObject">The factory object.</param> /// <param name="factoryMethod">The factory method.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetFactoryObject(string factoryObject, string factoryMethod) { objectDefinition.FactoryObjectName = factoryObject; objectDefinition.FactoryMethodName = factoryMethod; return this; } /// <summary> /// Sets whether or not this definition describes a singleton object. /// </summary> /// <param name="singleton">if set to <c>true</c> [singleton].</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetSingleton(bool singleton) { objectDefinition.IsSingleton = singleton; return this; } /// <summary> /// Sets whether objects or not this definition is abstract. /// </summary> /// <param name="flag">if set to <c>true</c> [flag].</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetAbstract(bool flag) { objectDefinition.IsAbstract = flag; return this; } /// <summary> /// Sets whether objects for this definition should be lazily initialized or not. /// </summary> /// <param name="lazy">if set to <c>true</c> [lazy].</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetLazyInit(bool lazy) { objectDefinition.IsLazyInit = lazy; return this; } /// <summary> /// Sets the autowire mode for this definition. /// </summary> /// <param name="autowireMode">The autowire mode.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetAutowireMode(AutoWiringMode autowireMode) { objectDefinition.AutowireMode = autowireMode; return this; } /// <summary> /// Sets the dependency check mode for this definition. /// </summary> /// <param name="dependencyCheck">The dependency check.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetDependencyCheck(DependencyCheckingMode dependencyCheck) { objectDefinition.DependencyCheck = dependencyCheck; return this; } /// <summary> /// Sets the name of the destroy method for this definition. /// </summary> /// <param name="methodName">Name of the method.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetDestroyMethodName(string methodName) { objectDefinition.DestroyMethodName = methodName; return this; } /// <summary> /// Sets the name of the init method for this definition. /// </summary> /// <param name="methodName">Name of the method.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetInitMethodName(string methodName) { objectDefinition.InitMethodName = methodName; return this; } /// <summary> /// Sets the resource description for this definition. /// </summary> /// <param name="resourceDescription">The resource description.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder SetResourceDescription(string resourceDescription) { objectDefinition.ResourceDescription = resourceDescription; return this; } /// <summary> /// Adds the specified object name to the list of objects that this definition depends on. 
/// </summary> /// <param name="objectName">Name of the object.</param> /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns> public ObjectDefinitionBuilder AddDependsOn(string objectName) { if (objectDefinition.DependsOn == null) { objectDefinition.DependsOn = new string[] {objectName}; } else { ArrayList arrayList = new ArrayList(); arrayList.AddRange(objectDefinition.DependsOn); arrayList.AddRange(new string[]{ objectName}); objectDefinition.DependsOn = (string[])arrayList.ToArray(typeof(string)); } return this; } #endregion } }
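// Illustrative usage sketch (not part of Spring.NET itself): shows the intended
// "one-liner" chaining style of the builder above. MyService and MyRepository are
// placeholder types, "myRepository" is a placeholder object name, and registering the
// resulting definition with a factory/registry is left out because that API is not
// part of this class.
using Spring.Objects.Factory.Support;

public class MyRepository { }

public class MyService
{
    public MyService(MyRepository repository) { }
    public string ConnectionString { get; set; }
    public void Init() { }
}

public static class ObjectDefinitionBuilderSketch
{
    public static AbstractObjectDefinition BuildServiceDefinition()
    {
        // Each Add/Set call returns the builder, so the whole definition can be
        // expressed as a single chained statement.
        return ObjectDefinitionBuilder.GenericObjectDefinition(typeof(MyService))
            .AddConstructorArgReference("myRepository")               // ctor arg resolved by object name
            .AddPropertyValue("ConnectionString", "Data Source=...")  // placeholder value
            .SetSingleton(true)
            .SetLazyInit(true)
            .SetInitMethodName("Init")
            .ObjectDefinition;                                        // validates and returns the definition
    }
}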
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Buffers; using System.Diagnostics; using System.Diagnostics.Contracts; using System.Security; using ZErrorCode = System.IO.Compression.ZLibNative.ErrorCode; using ZFlushCode = System.IO.Compression.ZLibNative.FlushCode; namespace System.IO.Compression { /// <summary> /// Provides a wrapper around the ZLib compression API. /// </summary> internal sealed class Deflater : IDisposable { private ZLibNative.ZLibStreamHandle _zlibStream; private MemoryHandle _inputBufferHandle; private bool _isDisposed; private const int minWindowBits = -15; // WindowBits must be between -8..-15 to write no header, 8..15 for a private const int maxWindowBits = 31; // zlib header, or 24..31 for a GZip header // Note, DeflateStream or the deflater do not try to be thread safe. // The lock is just used to make writing to unmanaged structures atomic to make sure // that they do not get inconsistent fields that may lead to an unmanaged memory violation. // To prevent *managed* buffer corruption or other weird behaviour users need to synchronise // on the stream explicitly. private object SyncLock => this; internal Deflater(CompressionLevel compressionLevel, int windowBits) { Debug.Assert(windowBits >= minWindowBits && windowBits <= maxWindowBits); ZLibNative.CompressionLevel zlibCompressionLevel; int memLevel; switch (compressionLevel) { // See the note in ZLibNative.CompressionLevel for the recommended combinations. case CompressionLevel.Optimal: zlibCompressionLevel = ZLibNative.CompressionLevel.DefaultCompression; memLevel = ZLibNative.Deflate_DefaultMemLevel; break; case CompressionLevel.Fastest: zlibCompressionLevel = ZLibNative.CompressionLevel.BestSpeed; memLevel = ZLibNative.Deflate_DefaultMemLevel; break; case CompressionLevel.NoCompression: zlibCompressionLevel = ZLibNative.CompressionLevel.NoCompression; memLevel = ZLibNative.Deflate_NoCompressionMemLevel; break; default: throw new ArgumentOutOfRangeException(nameof(compressionLevel)); } ZLibNative.CompressionStrategy strategy = ZLibNative.CompressionStrategy.DefaultStrategy; DeflateInit(zlibCompressionLevel, windowBits, memLevel, strategy); } ~Deflater() { Dispose(false); } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } [SecuritySafeCritical] private void Dispose(bool disposing) { if (!_isDisposed) { if (disposing) _zlibStream.Dispose(); DeallocateInputBufferHandle(); _isDisposed = true; } } public bool NeedsInput() => 0 == _zlibStream.AvailIn; internal unsafe void SetInput(ReadOnlyMemory<byte> inputBuffer) { Debug.Assert(NeedsInput(), "We have something left in previous input!"); Debug.Assert(_inputBufferHandle.PinnedPointer == null); if (0 == inputBuffer.Length) { return; } lock (SyncLock) { _inputBufferHandle = inputBuffer.Retain(pin: true); _zlibStream.NextIn = (IntPtr)_inputBufferHandle.PinnedPointer; _zlibStream.AvailIn = (uint)inputBuffer.Length; } } internal unsafe void SetInput(byte* inputBufferPtr, int count) { Debug.Assert(NeedsInput(), "We have something left in previous input!"); Debug.Assert(inputBufferPtr != null); Debug.Assert(_inputBufferHandle.PinnedPointer == null); if (count == 0) { return; } lock (SyncLock) { _zlibStream.NextIn = (IntPtr)inputBufferPtr; _zlibStream.AvailIn = (uint)count; } } internal int GetDeflateOutput(byte[] outputBuffer) { Contract.Ensures(Contract.Result<int>() >= 0 && 
Contract.Result<int>() <= outputBuffer.Length); Debug.Assert(null != outputBuffer, "Can't pass in a null output buffer!"); Debug.Assert(!NeedsInput(), "GetDeflateOutput should only be called after providing input"); try { int bytesRead; ReadDeflateOutput(outputBuffer, ZFlushCode.NoFlush, out bytesRead); return bytesRead; } finally { // Before returning, make sure to release input buffer if necessary: if (0 == _zlibStream.AvailIn) { DeallocateInputBufferHandle(); } } } private unsafe ZErrorCode ReadDeflateOutput(byte[] outputBuffer, ZFlushCode flushCode, out int bytesRead) { Debug.Assert(outputBuffer?.Length > 0); lock (SyncLock) { fixed (byte* bufPtr = &outputBuffer[0]) { _zlibStream.NextOut = (IntPtr)bufPtr; _zlibStream.AvailOut = (uint)outputBuffer.Length; ZErrorCode errC = Deflate(flushCode); bytesRead = outputBuffer.Length - (int)_zlibStream.AvailOut; return errC; } } } internal bool Finish(byte[] outputBuffer, out int bytesRead) { Debug.Assert(null != outputBuffer, "Can't pass in a null output buffer!"); Debug.Assert(outputBuffer.Length > 0, "Can't pass in an empty output buffer!"); Debug.Assert(NeedsInput(), "We have something left in previous input!"); unsafe { Debug.Assert(_inputBufferHandle.PinnedPointer == null); } // Note: we require that NeedsInput() == true, i.e. that 0 == _zlibStream.AvailIn. // If there is still input left we should never be getting here; instead we // should be calling GetDeflateOutput. ZErrorCode errC = ReadDeflateOutput(outputBuffer, ZFlushCode.Finish, out bytesRead); return errC == ZErrorCode.StreamEnd; } /// <summary> /// Returns true if there was something to flush. Otherwise False. /// </summary> internal bool Flush(byte[] outputBuffer, out int bytesRead) { Debug.Assert(null != outputBuffer, "Can't pass in a null output buffer!"); Debug.Assert(outputBuffer.Length > 0, "Can't pass in an empty output buffer!"); Debug.Assert(NeedsInput(), "We have something left in previous input!"); unsafe { Debug.Assert(_inputBufferHandle.PinnedPointer == null); } // Note: we require that NeedsInput() == true, i.e. that 0 == _zlibStream.AvailIn. // If there is still input left we should never be getting here; instead we // should be calling GetDeflateOutput. 
return ReadDeflateOutput(outputBuffer, ZFlushCode.SyncFlush, out bytesRead) == ZErrorCode.Ok; } private void DeallocateInputBufferHandle() { lock (SyncLock) { _zlibStream.AvailIn = 0; _zlibStream.NextIn = ZLibNative.ZNullPtr; _inputBufferHandle.Dispose(); } } [SecuritySafeCritical] private void DeflateInit(ZLibNative.CompressionLevel compressionLevel, int windowBits, int memLevel, ZLibNative.CompressionStrategy strategy) { ZErrorCode errC; try { errC = ZLibNative.CreateZLibStreamForDeflate(out _zlibStream, compressionLevel, windowBits, memLevel, strategy); } catch (Exception cause) { throw new ZLibException(SR.ZLibErrorDLLLoadError, cause); } switch (errC) { case ZErrorCode.Ok: return; case ZErrorCode.MemError: throw new ZLibException(SR.ZLibErrorNotEnoughMemory, "deflateInit2_", (int)errC, _zlibStream.GetErrorMessage()); case ZErrorCode.VersionError: throw new ZLibException(SR.ZLibErrorVersionMismatch, "deflateInit2_", (int)errC, _zlibStream.GetErrorMessage()); case ZErrorCode.StreamError: throw new ZLibException(SR.ZLibErrorIncorrectInitParameters, "deflateInit2_", (int)errC, _zlibStream.GetErrorMessage()); default: throw new ZLibException(SR.ZLibErrorUnexpected, "deflateInit2_", (int)errC, _zlibStream.GetErrorMessage()); } } [SecuritySafeCritical] private ZErrorCode Deflate(ZFlushCode flushCode) { ZErrorCode errC; try { errC = _zlibStream.Deflate(flushCode); } catch (Exception cause) { throw new ZLibException(SR.ZLibErrorDLLLoadError, cause); } switch (errC) { case ZErrorCode.Ok: case ZErrorCode.StreamEnd: return errC; case ZErrorCode.BufError: return errC; // This is a recoverable error case ZErrorCode.StreamError: throw new ZLibException(SR.ZLibErrorInconsistentStream, "deflate", (int)errC, _zlibStream.GetErrorMessage()); default: throw new ZLibException(SR.ZLibErrorUnexpected, "deflate", (int)errC, _zlibStream.GetErrorMessage()); } } } }
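// Illustrative sketch of the calling pattern the Deflater above expects (in this
// library the real caller is DeflateStream; the class and method names below are
// only for illustration). Input is fed only while NeedsInput() is true, output is
// drained with GetDeflateOutput, and Finish is called repeatedly until it reports
// the end of the deflate stream. windowBits of -15 requests a raw deflate stream
// with no header, matching the minWindowBits bound checked in the constructor.
// Deflater is internal, so this only compiles inside System.IO.Compression.
namespace System.IO.Compression
{
    internal static class DeflaterUsageSketch
    {
        internal static void Compress(ReadOnlyMemory<byte> input, Stream destination)
        {
            using (var deflater = new Deflater(CompressionLevel.Optimal, -15))
            {
                byte[] outputBuffer = new byte[8192];

                if (!input.IsEmpty)
                {
                    deflater.SetInput(input);

                    // Drain until the deflater has consumed all of this chunk's input.
                    while (!deflater.NeedsInput())
                    {
                        int written = deflater.GetDeflateOutput(outputBuffer);
                        if (written > 0)
                            destination.Write(outputBuffer, 0, written);
                    }
                }

                // Finish returns true once the end of the deflate stream has been emitted.
                bool finished;
                do
                {
                    finished = deflater.Finish(outputBuffer, out int written);
                    if (written > 0)
                        destination.Write(outputBuffer, 0, written);
                } while (!finished);
            }
        }
    }
}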
/* * * (c) Copyright Ascensio System Limited 2010-2021 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ using System; using System.Collections.Generic; using System.Linq; using System.Security; using ASC.Core; using ASC.Core.Users; using ASC.Files.Core; using ASC.Files.Core.Security; using ASC.Web.Core.Files; using ASC.Web.Files.Classes; using ASC.Web.Files.Resources; using ASC.Web.Files.Services.DocumentService; using ASC.Web.Files.Services.NotifyService; using ASC.Web.Files.Services.WCFService; using ASC.Web.Studio.Utility; using SecurityContext = ASC.Core.SecurityContext; namespace ASC.Web.Files.Utils { public static class FileSharing { public static bool CanSetAccess(FileEntry entry) { return entry != null && (entry.RootFolderType == FolderType.COMMON && Global.IsAdministrator || !CoreContext.UserManager.GetUsers(SecurityContext.CurrentAccount.ID).IsVisitor() && (entry.RootFolderType == FolderType.USER && (Equals(entry.RootFolderId, Global.FolderMy) || Global.GetFilesSecurity().CanEdit(entry)) || entry.RootFolderType == FolderType.Privacy && entry is File && (Equals(entry.RootFolderId, Global.FolderPrivacy) || Global.GetFilesSecurity().CanEdit(entry)))); } public static List<AceWrapper> GetSharedInfo(FileEntry entry) { if (entry == null) throw new ArgumentNullException(FilesCommonResource.ErrorMassage_BadRequest); if (!CanSetAccess(entry)) { Global.Logger.ErrorFormat("User {0} can't get shared info for {1} {2}", SecurityContext.CurrentAccount.ID, (entry.FileEntryType == FileEntryType.File ? "file" : "folder"), entry.ID); throw new SecurityException(FilesCommonResource.ErrorMassage_SecurityException); } var linkAccess = FileShare.Restrict; var result = new List<AceWrapper>(); var fileSecurity = Global.GetFilesSecurity(); var records = fileSecurity .GetShares(entry) .GroupBy(r => r.Subject) .Select(g => g.OrderBy(r => r.Level) .ThenBy(r => r.Level) .ThenByDescending(r => r.Share, new FileShareRecord.ShareComparer()).FirstOrDefault()); foreach (var r in records) { if (r.Subject == FileConstant.ShareLinkId) { linkAccess = r.Share; continue; } var u = CoreContext.UserManager.GetUsers(r.Subject); var isgroup = false; var title = u.DisplayUserName(false); var share = r.Share; if (u.ID == Constants.LostUser.ID) { var g = CoreContext.UserManager.GetGroupInfo(r.Subject); isgroup = true; title = g.Name; if (g.ID == Constants.GroupAdmin.ID) title = FilesCommonResource.Admin; if (g.ID == Constants.GroupEveryone.ID) title = FilesCommonResource.Everyone; if (g.ID == Constants.LostGroupInfo.ID) { fileSecurity.RemoveSubject(r.Subject); continue; } } else if (u.IsVisitor() && new FileShareRecord.ShareComparer().Compare(FileShare.Read, share) > 0) { share = FileShare.Read; } var w = new AceWrapper { SubjectId = r.Subject, SubjectName = title, SubjectGroup = isgroup, Share = share, Owner = entry.RootFolderType == FolderType.USER ? 
entry.RootFolderCreator == r.Subject : entry.CreateBy == r.Subject, LockedRights = r.Subject == SecurityContext.CurrentAccount.ID }; result.Add(w); } if (entry.FileEntryType == FileEntryType.File && !((File)entry).Encrypted && result.All(w => w.SubjectId != FileConstant.ShareLinkId) && (linkAccess != FileShare.Restrict || CoreContext.Configuration.Standalone || !TenantExtra.GetTenantQuota().Trial || FileUtility.CanWebView(entry.Title))) { var w = new AceWrapper { SubjectId = FileConstant.ShareLinkId, Link = FileShareLink.GetLink((File)entry), SubjectGroup = true, Share = linkAccess, Owner = false }; result.Add(w); } if (!result.Any(w => w.Owner)) { var ownerId = entry.RootFolderType == FolderType.USER ? entry.RootFolderCreator : entry.CreateBy; var w = new AceWrapper { SubjectId = ownerId, SubjectName = Global.GetUserName(ownerId), SubjectGroup = false, Share = FileShare.ReadWrite, Owner = true }; result.Add(w); } if (result.Any(w => w.SubjectId == SecurityContext.CurrentAccount.ID)) { result.Single(w => w.SubjectId == SecurityContext.CurrentAccount.ID).LockedRights = true; } if (entry.RootFolderType == FolderType.COMMON) { if (result.All(w => w.SubjectId != Constants.GroupAdmin.ID)) { var w = new AceWrapper { SubjectId = Constants.GroupAdmin.ID, SubjectName = FilesCommonResource.Admin, SubjectGroup = true, Share = FileShare.ReadWrite, Owner = false, LockedRights = true, }; result.Add(w); } if (result.All(w => w.SubjectId != Constants.GroupEveryone.ID)) { var w = new AceWrapper { SubjectId = Constants.GroupEveryone.ID, SubjectName = FilesCommonResource.Everyone, SubjectGroup = true, Share = fileSecurity.DefaultCommonShare, Owner = false, DisableRemove = true }; result.Add(w); } } return result; } public static bool SetAceObject(List<AceWrapper> aceWrappers, FileEntry entry, bool notify, string message) { if (entry == null) throw new ArgumentNullException(FilesCommonResource.ErrorMassage_BadRequest); if (!CanSetAccess(entry)) throw new SecurityException(FilesCommonResource.ErrorMassage_SecurityException); var fileSecurity = Global.GetFilesSecurity(); var entryType = entry.FileEntryType; var recipients = new Dictionary<Guid, FileShare>(); var usersWithoutRight = new List<Guid>(); var changed = false; foreach (var w in aceWrappers.OrderByDescending(ace => ace.SubjectGroup)) { var subjects = fileSecurity.GetUserSubjects(w.SubjectId); var ownerId = entry.RootFolderType == FolderType.USER ? entry.RootFolderCreator : entry.CreateBy; if (entry.RootFolderType == FolderType.COMMON && subjects.Contains(Constants.GroupAdmin.ID) || ownerId == w.SubjectId) continue; var share = w.Share; if (w.SubjectId == FileConstant.ShareLinkId) { if (w.Share == FileShare.ReadWrite && CoreContext.UserManager.GetUsers(SecurityContext.CurrentAccount.ID).IsVisitor()) throw new SecurityException(FilesCommonResource.ErrorMassage_SecurityException); // only editable format on personal if (CoreContext.Configuration.Personal && !FileUtility.CanWebView(entry.Title) && w.Share != FileShare.Restrict) throw new SecurityException(FilesCommonResource.ErrorMassage_BadRequest); // only editable format on SaaS trial if (w.Share != FileShare.Restrict && !CoreContext.Configuration.Standalone && TenantExtra.GetTenantQuota().Trial && !FileUtility.CanWebView(entry.Title)) throw new SecurityException(FilesCommonResource.ErrorMassage_BadRequest); share = w.Share == FileShare.Restrict ? 
FileShare.None : w.Share; } fileSecurity.Share(entry.ID, entryType, w.SubjectId, share); changed = true; if (w.SubjectId == FileConstant.ShareLinkId) continue; entry.Access = share; var listUsersId = new List<Guid>(); if (w.SubjectGroup) listUsersId = CoreContext.UserManager.GetUsersByGroup(w.SubjectId).Select(ui => ui.ID).ToList(); else listUsersId.Add(w.SubjectId); listUsersId.Remove(SecurityContext.CurrentAccount.ID); if (entryType == FileEntryType.File) { listUsersId.ForEach(uid => FileTracker.ChangeRight(entry.ID, uid, true)); } var addRecipient = share == FileShare.Read || share == FileShare.CustomFilter || share == FileShare.ReadWrite || share == FileShare.Review || share == FileShare.FillForms || share == FileShare.Comment || share == FileShare.None && entry.RootFolderType == FolderType.COMMON; var removeNew = share == FileShare.None && entry.RootFolderType == FolderType.USER || share == FileShare.Restrict; listUsersId.ForEach(id => { recipients.Remove(id); if (addRecipient) { recipients.Add(id, share); } else if (removeNew) { usersWithoutRight.Add(id); } }); } if (entryType == FileEntryType.File) { DocumentServiceHelper.CheckUsersForDrop((File)entry); } if (recipients.Any()) { if (entryType == FileEntryType.File || ((Folder)entry).TotalSubFolders + ((Folder)entry).TotalFiles > 0 || entry.ProviderEntry) { FileMarker.MarkAsNew(entry, recipients.Keys.ToList()); } if ((entry.RootFolderType == FolderType.USER || entry.RootFolderType == FolderType.Privacy) && notify) { NotifyClient.SendShareNotice(entry, recipients, message); } } usersWithoutRight.ForEach(userId => FileMarker.RemoveMarkAsNew(entry, userId)); return changed; } public static void RemoveAce(List<FileEntry> entries) { var fileSecurity = Global.GetFilesSecurity(); entries.ForEach( entry => { if (entry.RootFolderType != FolderType.USER && entry.RootFolderType != FolderType.Privacy || Equals(entry.RootFolderId, Global.FolderMy) || Equals(entry.RootFolderId, Global.FolderPrivacy)) return; var entryType = entry.FileEntryType; fileSecurity.Share(entry.ID, entryType, SecurityContext.CurrentAccount.ID, entry.RootFolderType == FolderType.USER ? fileSecurity.DefaultMyShare : fileSecurity.DefaultPrivacyShare); if (entryType == FileEntryType.File) { DocumentServiceHelper.CheckUsersForDrop((File)entry); } FileMarker.RemoveMarkAsNew(entry); }); } } }
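// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source above) of how the
// FileSharing helper is typically driven: check CanSetAccess, build a list of
// AceWrapper entries, push it with SetAceObject, then read the result back
// with GetSharedInfo. It assumes the ONLYOFFICE types referenced by the class
// above (FileEntry, AceWrapper, FileShare, FileConstant) resolve through the
// same namespaces; SharingExample and ShareWithUser are our own names.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using ASC.Files.Core;
using ASC.Files.Core.Security;
using ASC.Web.Files.Classes;
using ASC.Web.Files.Services.WCFService;

namespace ASC.Web.Files.Utils
{
    internal static class SharingExample
    {
        // Grants read access on an entry to a single user, enables the public
        // link with read access, and prints the resulting access list.
        internal static void ShareWithUser(FileEntry entry, Guid userId)
        {
            if (!FileSharing.CanSetAccess(entry))
                return; // the current account may not change sharing on this entry

            var aces = new List<AceWrapper>
            {
                new AceWrapper { SubjectId = userId, SubjectGroup = false, Share = FileShare.Read },
                new AceWrapper { SubjectId = FileConstant.ShareLinkId, SubjectGroup = true, Share = FileShare.Read },
            };

            var changed = FileSharing.SetAceObject(aces, entry, notify: true, message: "shared for review");

            if (changed)
            {
                foreach (var ace in FileSharing.GetSharedInfo(entry).Where(a => !a.Owner))
                    Console.WriteLine("{0} -> {1}", ace.SubjectName, ace.Share);
            }
        }
    }
}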
using System; using System.Collections.Generic; using System.Diagnostics; using System.Text; namespace Lucene.Net.Search { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using AtomicReader = Lucene.Net.Index.AtomicReader; using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; using IBits = Lucene.Net.Util.IBits; using DocsEnum = Lucene.Net.Index.DocsEnum; using IndexReaderContext = Lucene.Net.Index.IndexReaderContext; using ReaderUtil = Lucene.Net.Index.ReaderUtil; using Similarity = Lucene.Net.Search.Similarities.Similarity; using SimScorer = Lucene.Net.Search.Similarities.Similarity.SimScorer; using Term = Lucene.Net.Index.Term; using TermContext = Lucene.Net.Index.TermContext; using TermsEnum = Lucene.Net.Index.TermsEnum; using TermState = Lucene.Net.Index.TermState; using ToStringUtils = Lucene.Net.Util.ToStringUtils; /// <summary> /// A <see cref="Query"/> that matches documents containing a term. /// this may be combined with other terms with a <see cref="BooleanQuery"/>. /// </summary> #if FEATURE_SERIALIZABLE [Serializable] #endif public class TermQuery : Query { private readonly Term term; private readonly int docFreq; private readonly TermContext perReaderTermState; internal sealed class TermWeight : Weight { private readonly TermQuery outerInstance; internal readonly Similarity similarity; internal readonly Similarity.SimWeight stats; internal readonly TermContext termStates; public TermWeight(TermQuery outerInstance, IndexSearcher searcher, TermContext termStates) { this.outerInstance = outerInstance; Debug.Assert(termStates != null, "TermContext must not be null"); this.termStates = termStates; this.similarity = searcher.Similarity; this.stats = similarity.ComputeWeight(outerInstance.Boost, searcher.CollectionStatistics(outerInstance.term.Field), searcher.TermStatistics(outerInstance.term, termStates)); } public override string ToString() { return "weight(" + outerInstance + ")"; } public override Query Query => outerInstance; public override float GetValueForNormalization() { return stats.GetValueForNormalization(); } public override void Normalize(float queryNorm, float topLevelBoost) { stats.Normalize(queryNorm, topLevelBoost); } public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { Debug.Assert(termStates.TopReaderContext == ReaderUtil.GetTopLevelContext(context), "The top-reader used to create Weight (" + termStates.TopReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.GetTopLevelContext(context)); TermsEnum termsEnum = GetTermsEnum(context); if (termsEnum == null) { return null; } DocsEnum docs = termsEnum.Docs(acceptDocs, null); Debug.Assert(docs != null); return new TermScorer(this, docs, similarity.GetSimScorer(stats, context)); } /// <summary> /// Returns a <see 
cref="TermsEnum"/> positioned at this weights <see cref="Index.Term"/> or <c>null</c> if /// the term does not exist in the given context. /// </summary> private TermsEnum GetTermsEnum(AtomicReaderContext context) { TermState state = termStates.Get(context.Ord); if (state == null) // term is not present in that reader { Debug.Assert(TermNotInReader(context.AtomicReader, outerInstance.term), "no termstate found but term exists in reader term=" + outerInstance.term); return null; } //System.out.println("LD=" + reader.getLiveDocs() + " set?=" + (reader.getLiveDocs() != null ? reader.getLiveDocs().get(0) : "null")); TermsEnum termsEnum = context.AtomicReader.GetTerms(outerInstance.term.Field).GetIterator(null); termsEnum.SeekExact(outerInstance.term.Bytes, state); return termsEnum; } private bool TermNotInReader(AtomicReader reader, Term term) { // only called from assert //System.out.println("TQ.termNotInReader reader=" + reader + " term=" + field + ":" + bytes.utf8ToString()); return reader.DocFreq(term) == 0; } public override Explanation Explain(AtomicReaderContext context, int doc) { Scorer scorer = GetScorer(context, context.AtomicReader.LiveDocs); if (scorer != null) { int newDoc = scorer.Advance(doc); if (newDoc == doc) { float freq = scorer.Freq; SimScorer docScorer = similarity.GetSimScorer(stats, context); ComplexExplanation result = new ComplexExplanation(); result.Description = "weight(" + Query + " in " + doc + ") [" + similarity.GetType().Name + "], result of:"; Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "termFreq=" + freq)); result.AddDetail(scoreExplanation); result.Value = scoreExplanation.Value; result.Match = true; return result; } } return new ComplexExplanation(false, 0.0f, "no matching term"); } } /// <summary> /// Constructs a query for the term <paramref name="t"/>. </summary> public TermQuery(Term t) : this(t, -1) { } /// <summary> /// Expert: constructs a <see cref="TermQuery"/> that will use the /// provided <paramref name="docFreq"/> instead of looking up the docFreq /// against the searcher. /// </summary> public TermQuery(Term t, int docFreq) { term = t; this.docFreq = docFreq; perReaderTermState = null; } /// <summary> /// Expert: constructs a <see cref="TermQuery"/> that will use the /// provided docFreq instead of looking up the docFreq /// against the searcher. /// </summary> public TermQuery(Term t, TermContext states) { Debug.Assert(states != null); term = t; docFreq = states.DocFreq; perReaderTermState = states; } /// <summary> /// Returns the term of this query. </summary> public virtual Term Term => term; public override Weight CreateWeight(IndexSearcher searcher) { IndexReaderContext context = searcher.TopReaderContext; TermContext termState; if (perReaderTermState == null || perReaderTermState.TopReaderContext != context) { // make TermQuery single-pass if we don't have a PRTS or if the context differs! termState = TermContext.Build(context, term); } else { // PRTS was pre-build for this IS termState = this.perReaderTermState; } // we must not ignore the given docFreq - if set use the given value (lie) if (docFreq != -1) { termState.DocFreq = docFreq; } return new TermWeight(this, searcher, termState); } public override void ExtractTerms(ISet<Term> terms) { terms.Add(Term); } /// <summary> /// Prints a user-readable version of this query. 
</summary> public override string ToString(string field) { StringBuilder buffer = new StringBuilder(); if (!term.Field.Equals(field, StringComparison.Ordinal)) { buffer.Append(term.Field); buffer.Append(":"); } buffer.Append(term.Text()); buffer.Append(ToStringUtils.Boost(Boost)); return buffer.ToString(); } /// <summary> /// Returns <c>true</c> if <paramref name="o"/> is equal to this. </summary> public override bool Equals(object o) { if (!(o is TermQuery)) { return false; } TermQuery other = (TermQuery)o; return (this.Boost == other.Boost) && this.term.Equals(other.term); } /// <summary> /// Returns a hash code value for this object. </summary> public override int GetHashCode() { return J2N.BitConversion.SingleToInt32Bits(Boost) ^ term.GetHashCode(); } } }
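// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source above) showing the
// usual way TermQuery is exercised: index a couple of documents, open a
// searcher, and run an exact-term lookup. It assumes Lucene.NET 4.8 with the
// Lucene.Net.Analysis.Common package for StandardAnalyzer; the example class
// name and field name "body" are ours.
// ---------------------------------------------------------------------------
using System;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Lucene.Net.Util;

namespace Lucene.Net.Examples
{
    internal static class TermQueryExample
    {
        internal static void Main()
        {
            const LuceneVersion version = LuceneVersion.LUCENE_48;

            using (var dir = new RAMDirectory())
            {
                // Index two small documents.
                using (var writer = new IndexWriter(dir, new IndexWriterConfig(version, new StandardAnalyzer(version))))
                {
                    var doc1 = new Document();
                    doc1.Add(new TextField("body", "the quick brown fox", Field.Store.YES));
                    writer.AddDocument(doc1);

                    var doc2 = new Document();
                    doc2.Add(new TextField("body", "a lazy dog", Field.Store.YES));
                    writer.AddDocument(doc2);
                }

                // TermQuery matches documents containing the exact analyzed term.
                using (var reader = DirectoryReader.Open(dir))
                {
                    var searcher = new IndexSearcher(reader);
                    var query = new TermQuery(new Term("body", "fox"));

                    TopDocs hits = searcher.Search(query, 10);
                    foreach (ScoreDoc sd in hits.ScoreDocs)
                    {
                        Console.WriteLine("{0} (score {1})", searcher.Doc(sd.Doc).Get("body"), sd.Score);
                    }
                }
            }
        }
    }
}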
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.ServiceModel; using System.IO; using System.Text; using System.Globalization; using System.Runtime; namespace System.ServiceModel.Channels { internal static class DecoderHelper { public static void ValidateSize(int size) { if (size <= 0) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("size", size, SR.ValueMustBePositive)); } } } internal struct IntDecoder { private int _value; private short _index; private bool _isValueDecoded; private const int LastIndex = 4; public int Value { get { if (!_isValueDecoded) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.FramingValueNotAvailable)); return _value; } } public bool IsValueDecoded { get { return _isValueDecoded; } } public void Reset() { _index = 0; _value = 0; _isValueDecoded = false; } public int Decode(byte[] buffer, int offset, int size) { DecoderHelper.ValidateSize(size); if (_isValueDecoded) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.FramingValueNotAvailable)); } int bytesConsumed = 0; while (bytesConsumed < size) { int next = buffer[offset]; _value |= (next & 0x7F) << (_index * 7); bytesConsumed++; if (_index == LastIndex && (next & 0xF8) != 0) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataException(SR.FramingSizeTooLarge)); } _index++; if ((next & 0x80) == 0) { _isValueDecoded = true; break; } offset++; } return bytesConsumed; } } internal abstract class StringDecoder { private int _encodedSize; private byte[] _encodedBytes; private int _bytesNeeded; private string _value; private State _currentState; private IntDecoder _sizeDecoder; private int _sizeQuota; private int _valueLengthInBytes; public StringDecoder(int sizeQuota) { _sizeQuota = sizeQuota; _sizeDecoder = new IntDecoder(); _currentState = State.ReadingSize; Reset(); } public bool IsValueDecoded { get { return _currentState == State.Done; } } public string Value { get { if (_currentState != State.Done) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.FramingValueNotAvailable)); return _value; } } public int Decode(byte[] buffer, int offset, int size) { DecoderHelper.ValidateSize(size); int bytesConsumed; switch (_currentState) { case State.ReadingSize: bytesConsumed = _sizeDecoder.Decode(buffer, offset, size); if (_sizeDecoder.IsValueDecoded) { _encodedSize = _sizeDecoder.Value; if (_encodedSize > _sizeQuota) { Exception quotaExceeded = OnSizeQuotaExceeded(_encodedSize); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(quotaExceeded); } if (_encodedBytes == null || _encodedBytes.Length < _encodedSize) { _encodedBytes = Fx.AllocateByteArray(_encodedSize); _value = null; } _currentState = State.ReadingBytes; _bytesNeeded = _encodedSize; } break; case State.ReadingBytes: if (_value != null && _valueLengthInBytes == _encodedSize && _bytesNeeded == _encodedSize && size >= _encodedSize && CompareBuffers(_encodedBytes, buffer, offset)) { bytesConsumed = _bytesNeeded; OnComplete(_value); } else { bytesConsumed = _bytesNeeded; if (size < _bytesNeeded) bytesConsumed = size; Buffer.BlockCopy(buffer, offset, _encodedBytes, _encodedSize - _bytesNeeded, bytesConsumed); _bytesNeeded -= bytesConsumed; if (_bytesNeeded == 0) { _value = Encoding.UTF8.GetString(_encodedBytes, 0, _encodedSize); _valueLengthInBytes = 
_encodedSize; OnComplete(_value); } } break; default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataException(SR.InvalidDecoderStateMachine)); } return bytesConsumed; } protected virtual void OnComplete(string value) { _currentState = State.Done; } private static bool CompareBuffers(byte[] buffer1, byte[] buffer2, int offset) { for (int i = 0; i < buffer1.Length; i++) { if (buffer1[i] != buffer2[i + offset]) { return false; } } return true; } protected abstract Exception OnSizeQuotaExceeded(int size); public void Reset() { _currentState = State.ReadingSize; _sizeDecoder.Reset(); } private enum State { ReadingSize, ReadingBytes, Done, } } internal class ViaStringDecoder : StringDecoder { private Uri _via; public ViaStringDecoder(int sizeQuota) : base(sizeQuota) { } protected override Exception OnSizeQuotaExceeded(int size) { Exception result = new InvalidDataException(SR.Format(SR.FramingViaTooLong, size)); FramingEncodingString.AddFaultString(result, FramingEncodingString.ViaTooLongFault); return result; } protected override void OnComplete(string value) { try { _via = new Uri(value); base.OnComplete(value); } catch (UriFormatException exception) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataException(SR.Format(SR.FramingViaNotUri, value), exception)); } } public Uri ValueAsUri { get { if (!IsValueDecoded) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.FramingValueNotAvailable)); return _via; } } } internal class FaultStringDecoder : StringDecoder { internal const int FaultSizeQuota = 256; public FaultStringDecoder() : base(FaultSizeQuota) { } protected override Exception OnSizeQuotaExceeded(int size) { return new InvalidDataException(SR.Format(SR.FramingFaultTooLong, size)); } public static Exception GetFaultException(string faultString, string via, string contentType) { if (faultString == FramingEncodingString.EndpointNotFoundFault) { return new EndpointNotFoundException(SR.Format(SR.EndpointNotFound, via)); } else if (faultString == FramingEncodingString.ContentTypeInvalidFault) { return new ProtocolException(SR.Format(SR.FramingContentTypeMismatch, contentType, via)); } else if (faultString == FramingEncodingString.ServiceActivationFailedFault) { return new ServiceActivationException(SR.Format(SR.Hosting_ServiceActivationFailed, via)); } else if (faultString == FramingEncodingString.ConnectionDispatchFailedFault) { return new CommunicationException(SR.Format(SR.Sharing_ConnectionDispatchFailed, via)); } else if (faultString == FramingEncodingString.EndpointUnavailableFault) { return new EndpointNotFoundException(SR.Format(SR.Sharing_EndpointUnavailable, via)); } else if (faultString == FramingEncodingString.MaxMessageSizeExceededFault) { Exception inner = new QuotaExceededException(SR.FramingMaxMessageSizeExceeded); return new CommunicationException(inner.Message, inner); } else if (faultString == FramingEncodingString.UnsupportedModeFault) { return new ProtocolException(SR.Format(SR.FramingModeNotSupportedFault, via)); } else if (faultString == FramingEncodingString.UnsupportedVersionFault) { return new ProtocolException(SR.Format(SR.FramingVersionNotSupportedFault, via)); } else if (faultString == FramingEncodingString.ContentTypeTooLongFault) { Exception inner = new QuotaExceededException(SR.Format(SR.FramingContentTypeTooLongFault, contentType)); return new CommunicationException(inner.Message, inner); } else if (faultString == FramingEncodingString.ViaTooLongFault) { Exception inner 
= new QuotaExceededException(SR.Format(SR.FramingViaTooLongFault, via)); return new CommunicationException(inner.Message, inner); } else if (faultString == FramingEncodingString.ServerTooBusyFault) { return new ServerTooBusyException(SR.Format(SR.ServerTooBusy, via)); } else if (faultString == FramingEncodingString.UpgradeInvalidFault) { return new ProtocolException(SR.Format(SR.FramingUpgradeInvalid, via)); } else { return new ProtocolException(SR.Format(SR.FramingFaultUnrecognized, faultString)); } } } internal class ContentTypeStringDecoder : StringDecoder { public ContentTypeStringDecoder(int sizeQuota) : base(sizeQuota) { } protected override Exception OnSizeQuotaExceeded(int size) { Exception result = new InvalidDataException(SR.Format(SR.FramingContentTypeTooLong, size)); FramingEncodingString.AddFaultString(result, FramingEncodingString.ContentTypeTooLongFault); return result; } public static string GetString(FramingEncodingType type) { switch (type) { case FramingEncodingType.Soap11Utf8: return FramingEncodingString.Soap11Utf8; case FramingEncodingType.Soap11Utf16: return FramingEncodingString.Soap11Utf16; case FramingEncodingType.Soap11Utf16FFFE: return FramingEncodingString.Soap11Utf16FFFE; case FramingEncodingType.Soap12Utf8: return FramingEncodingString.Soap12Utf8; case FramingEncodingType.Soap12Utf16: return FramingEncodingString.Soap12Utf16; case FramingEncodingType.Soap12Utf16FFFE: return FramingEncodingString.Soap12Utf16FFFE; case FramingEncodingType.MTOM: return FramingEncodingString.MTOM; case FramingEncodingType.Binary: return FramingEncodingString.Binary; case FramingEncodingType.BinarySession: return FramingEncodingString.BinarySession; default: return "unknown" + ((int)type).ToString(CultureInfo.InvariantCulture); } } } internal abstract class FramingDecoder { private long _streamPosition; protected FramingDecoder() { } protected FramingDecoder(long streamPosition) { _streamPosition = streamPosition; } protected abstract string CurrentStateAsString { get; } public long StreamPosition { get { return _streamPosition; } set { _streamPosition = value; } } protected void ValidateFramingMode(FramingMode mode) { switch (mode) { case FramingMode.Singleton: case FramingMode.Duplex: case FramingMode.Simplex: case FramingMode.SingletonSized: break; default: { Exception exception = CreateException(new InvalidDataException(SR.Format( SR.FramingModeNotSupported, mode.ToString())), FramingEncodingString.UnsupportedModeFault); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(exception); } } } protected void ValidateRecordType(FramingRecordType expectedType, FramingRecordType foundType) { if (foundType != expectedType) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateInvalidRecordTypeException(expectedType, foundType)); } } // special validation for Preamble Ack for usability purposes (MB#39593) protected void ValidatePreambleAck(FramingRecordType foundType) { if (foundType != FramingRecordType.PreambleAck) { Exception inner = CreateInvalidRecordTypeException(FramingRecordType.PreambleAck, foundType); string exceptionString; if (((byte)foundType == 'h') || ((byte)foundType == 'H')) { exceptionString = SR.PreambleAckIncorrectMaybeHttp; } else { exceptionString = SR.PreambleAckIncorrect; } throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ProtocolException(exceptionString, inner)); } } private Exception CreateInvalidRecordTypeException(FramingRecordType expectedType, FramingRecordType foundType) { return new 
InvalidDataException(SR.Format(SR.FramingRecordTypeMismatch, expectedType.ToString(), foundType.ToString())); } protected void ValidateMajorVersion(int majorVersion) { if (majorVersion != FramingVersion.Major) { Exception exception = CreateException(new InvalidDataException(SR.Format( SR.FramingVersionNotSupported, majorVersion)), FramingEncodingString.UnsupportedVersionFault); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(exception); } } public Exception CreatePrematureEOFException() { return CreateException(new InvalidDataException(SR.FramingPrematureEOF)); } protected Exception CreateException(InvalidDataException innerException, string framingFault) { Exception result = CreateException(innerException); FramingEncodingString.AddFaultString(result, framingFault); return result; } protected Exception CreateException(InvalidDataException innerException) { return new ProtocolException(SR.Format(SR.FramingError, StreamPosition, CurrentStateAsString), innerException); } } internal class SingletonMessageDecoder : FramingDecoder { private IntDecoder _sizeDecoder; private int _chunkBytesNeeded; private int _chunkSize; private State _currentState; public SingletonMessageDecoder(long streamPosition) : base(streamPosition) { _sizeDecoder = new IntDecoder(); _currentState = State.ChunkStart; } public void Reset() { _currentState = State.ChunkStart; } public State CurrentState { get { return _currentState; } } protected override string CurrentStateAsString { get { return _currentState.ToString(); } } public int ChunkSize { get { if (_currentState < State.ChunkStart) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.FramingValueNotAvailable)); } return _chunkSize; } } public int Decode(byte[] bytes, int offset, int size) { DecoderHelper.ValidateSize(size); try { int bytesConsumed; switch (_currentState) { case State.ReadingEnvelopeChunkSize: bytesConsumed = _sizeDecoder.Decode(bytes, offset, size); if (_sizeDecoder.IsValueDecoded) { _chunkSize = _sizeDecoder.Value; _sizeDecoder.Reset(); if (_chunkSize == 0) { _currentState = State.EnvelopeEnd; } else { _currentState = State.ChunkStart; _chunkBytesNeeded = _chunkSize; } } break; case State.ChunkStart: bytesConsumed = 0; _currentState = State.ReadingEnvelopeBytes; break; case State.ReadingEnvelopeBytes: bytesConsumed = size; if (bytesConsumed > _chunkBytesNeeded) { bytesConsumed = _chunkBytesNeeded; } _chunkBytesNeeded -= bytesConsumed; if (_chunkBytesNeeded == 0) { _currentState = State.ChunkEnd; } break; case State.ChunkEnd: bytesConsumed = 0; _currentState = State.ReadingEnvelopeChunkSize; break; case State.EnvelopeEnd: ValidateRecordType(FramingRecordType.End, (FramingRecordType)bytes[offset]); bytesConsumed = 1; _currentState = State.End; break; case State.End: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError( CreateException(new InvalidDataException(SR.FramingAtEnd))); default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError( CreateException(new InvalidDataException(SR.InvalidDecoderStateMachine))); } StreamPosition += bytesConsumed; return bytesConsumed; } catch (InvalidDataException e) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateException(e)); } } public enum State { ReadingEnvelopeChunkSize, ChunkStart, ReadingEnvelopeBytes, ChunkEnd, EnvelopeEnd, End, } } // common set of states used on the client-side. 
internal enum ClientFramingDecoderState { ReadingUpgradeRecord, ReadingUpgradeMode, UpgradeResponse, ReadingAckRecord, Start, ReadingFault, ReadingFaultString, Fault, ReadingEnvelopeRecord, ReadingEnvelopeSize, EnvelopeStart, ReadingEnvelopeBytes, EnvelopeEnd, ReadingEndRecord, End, } internal abstract class ClientFramingDecoder : FramingDecoder { private ClientFramingDecoderState _currentState; protected ClientFramingDecoder(long streamPosition) : base(streamPosition) { _currentState = ClientFramingDecoderState.ReadingUpgradeRecord; } public ClientFramingDecoderState CurrentState { get { return _currentState; } protected set { _currentState = value; } } protected override string CurrentStateAsString { get { return _currentState.ToString(); } } public abstract string Fault { get; } public abstract int Decode(byte[] bytes, int offset, int size); } // Pattern: // (UpgradeResponse, upgrade-bytes)*, (Ack | Fault), // ((EnvelopeStart, ReadingEnvelopeBytes*, EnvelopeEnd) | Fault)*, // End internal class ClientDuplexDecoder : ClientFramingDecoder { private IntDecoder _sizeDecoder; private FaultStringDecoder _faultDecoder; private int _envelopeBytesNeeded; private int _envelopeSize; public ClientDuplexDecoder(long streamPosition) : base(streamPosition) { _sizeDecoder = new IntDecoder(); } public int EnvelopeSize { get { if (CurrentState < ClientFramingDecoderState.EnvelopeStart) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.FramingValueNotAvailable)); return _envelopeSize; } } public override string Fault { get { if (CurrentState < ClientFramingDecoderState.Fault) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.FramingValueNotAvailable)); return _faultDecoder.Value; } } public override int Decode(byte[] bytes, int offset, int size) { DecoderHelper.ValidateSize(size); try { int bytesConsumed; FramingRecordType recordType; switch (CurrentState) { case ClientFramingDecoderState.ReadingUpgradeRecord: recordType = (FramingRecordType)bytes[offset]; if (recordType == FramingRecordType.UpgradeResponse) { bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.UpgradeResponse; } else { bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingAckRecord; } break; case ClientFramingDecoderState.UpgradeResponse: bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingUpgradeRecord; break; case ClientFramingDecoderState.ReadingAckRecord: recordType = (FramingRecordType)bytes[offset]; if (recordType == FramingRecordType.Fault) { bytesConsumed = 1; _faultDecoder = new FaultStringDecoder(); base.CurrentState = ClientFramingDecoderState.ReadingFaultString; break; } ValidatePreambleAck(recordType); bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.Start; break; case ClientFramingDecoderState.Start: bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingEnvelopeRecord; break; case ClientFramingDecoderState.ReadingEnvelopeRecord: recordType = (FramingRecordType)bytes[offset]; if (recordType == FramingRecordType.End) { bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.End; break; } else if (recordType == FramingRecordType.Fault) { bytesConsumed = 1; _faultDecoder = new FaultStringDecoder(); base.CurrentState = ClientFramingDecoderState.ReadingFaultString; break; } ValidateRecordType(FramingRecordType.SizedEnvelope, recordType); bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.ReadingEnvelopeSize; _sizeDecoder.Reset(); break; case 
ClientFramingDecoderState.ReadingEnvelopeSize: bytesConsumed = _sizeDecoder.Decode(bytes, offset, size); if (_sizeDecoder.IsValueDecoded) { base.CurrentState = ClientFramingDecoderState.EnvelopeStart; _envelopeSize = _sizeDecoder.Value; _envelopeBytesNeeded = _envelopeSize; } break; case ClientFramingDecoderState.EnvelopeStart: bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingEnvelopeBytes; break; case ClientFramingDecoderState.ReadingEnvelopeBytes: bytesConsumed = size; if (bytesConsumed > _envelopeBytesNeeded) bytesConsumed = _envelopeBytesNeeded; _envelopeBytesNeeded -= bytesConsumed; if (_envelopeBytesNeeded == 0) base.CurrentState = ClientFramingDecoderState.EnvelopeEnd; break; case ClientFramingDecoderState.EnvelopeEnd: bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingEnvelopeRecord; break; case ClientFramingDecoderState.ReadingFaultString: bytesConsumed = _faultDecoder.Decode(bytes, offset, size); if (_faultDecoder.IsValueDecoded) { base.CurrentState = ClientFramingDecoderState.Fault; } break; case ClientFramingDecoderState.Fault: bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingEndRecord; break; case ClientFramingDecoderState.ReadingEndRecord: ValidateRecordType(FramingRecordType.End, (FramingRecordType)bytes[offset]); bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.End; break; case ClientFramingDecoderState.End: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError( CreateException(new InvalidDataException(SR.FramingAtEnd))); default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError( CreateException(new InvalidDataException(SR.InvalidDecoderStateMachine))); } StreamPosition += bytesConsumed; return bytesConsumed; } catch (InvalidDataException e) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateException(e)); } } } // Pattern: // (UpgradeResponse, upgrade-bytes)*, (Ack | Fault), // End internal class ClientSingletonDecoder : ClientFramingDecoder { private FaultStringDecoder _faultDecoder; public ClientSingletonDecoder(long streamPosition) : base(streamPosition) { } public override string Fault { get { if (CurrentState < ClientFramingDecoderState.Fault) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.FramingValueNotAvailable)); return _faultDecoder.Value; } } public override int Decode(byte[] bytes, int offset, int size) { DecoderHelper.ValidateSize(size); try { int bytesConsumed; FramingRecordType recordType; switch (CurrentState) { case ClientFramingDecoderState.ReadingUpgradeRecord: recordType = (FramingRecordType)bytes[offset]; if (recordType == FramingRecordType.UpgradeResponse) { bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.UpgradeResponse; } else { bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingAckRecord; } break; case ClientFramingDecoderState.UpgradeResponse: bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingUpgradeRecord; break; case ClientFramingDecoderState.ReadingAckRecord: recordType = (FramingRecordType)bytes[offset]; if (recordType == FramingRecordType.Fault) { bytesConsumed = 1; _faultDecoder = new FaultStringDecoder(); base.CurrentState = ClientFramingDecoderState.ReadingFaultString; break; } ValidatePreambleAck(recordType); bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.Start; break; case ClientFramingDecoderState.Start: bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingEnvelopeRecord; break; case 
ClientFramingDecoderState.ReadingEnvelopeRecord: recordType = (FramingRecordType)bytes[offset]; if (recordType == FramingRecordType.End) { bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.End; break; } else if (recordType == FramingRecordType.Fault) { bytesConsumed = 0; base.CurrentState = ClientFramingDecoderState.ReadingFault; break; } ValidateRecordType(FramingRecordType.UnsizedEnvelope, recordType); bytesConsumed = 1; base.CurrentState = ClientFramingDecoderState.EnvelopeStart; break; case ClientFramingDecoderState.EnvelopeStart: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError( CreateException(new InvalidDataException(SR.FramingAtEnd))); case ClientFramingDecoderState.ReadingFault: recordType = (FramingRecordType)bytes[offset]; ValidateRecordType(FramingRecordType.Fault, recordType); bytesConsumed = 1; _faultDecoder = new FaultStringDecoder(); base.CurrentState = ClientFramingDecoderState.ReadingFaultString; break; case ClientFramingDecoderState.ReadingFaultString: bytesConsumed = _faultDecoder.Decode(bytes, offset, size); if (_faultDecoder.IsValueDecoded) { base.CurrentState = ClientFramingDecoderState.Fault; } break; case ClientFramingDecoderState.Fault: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError( CreateException(new InvalidDataException(SR.FramingAtEnd))); default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError( CreateException(new InvalidDataException(SR.InvalidDecoderStateMachine))); } StreamPosition += bytesConsumed; return bytesConsumed; } catch (InvalidDataException e) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateException(e)); } } } }
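// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source above). The size
// records consumed by IntDecoder and the envelope decoders use a 7-bit
// variable-length integer: the low seven bits of each byte carry data, the
// least significant group comes first, and the high bit is set while more
// groups follow. The standalone round trip below mirrors that loop
// (value |= (next & 0x7F) << (index * 7)); the class and method names are
// ours, not part of the framing code.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;

namespace FramingExamples
{
    internal static class FramingSizeExample
    {
        internal static byte[] EncodeSize(int value)
        {
            if (value < 0)
                throw new ArgumentOutOfRangeException("value");

            var bytes = new List<byte>();
            do
            {
                byte b = (byte)(value & 0x7F);
                value >>= 7;
                if (value != 0)
                    b |= 0x80;          // continuation bit: another group follows
                bytes.Add(b);
            }
            while (value != 0);
            return bytes.ToArray();
        }

        internal static int DecodeSize(byte[] buffer, int offset, out int bytesConsumed)
        {
            int value = 0;
            int index = 0;
            bytesConsumed = 0;
            while (true)
            {
                int next = buffer[offset + bytesConsumed];
                bytesConsumed++;
                value |= (next & 0x7F) << (index * 7);
                if ((next & 0x80) == 0)
                    return value;       // high bit clear: this was the last group
                index++;
            }
        }

        internal static void Main()
        {
            byte[] encoded = EncodeSize(0x12345);           // -> C5 C6 04
            int consumed;
            int decoded = DecodeSize(encoded, 0, out consumed);
            Console.WriteLine("{0:X} decoded from {1} byte(s)", decoded, consumed);
        }
    }
}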
using UnityEditor.VersionControl; using UnityEngine; using System.Collections; using System.Collections.Generic; using System.Reflection; using System.Linq; using UnityEditor; namespace PrefabEvolution { [InitializeOnLoad] static internal class PEUtils { static PEUtils() { EditorUtility.ClearProgressBar();// EditorApplication.hierarchyWindowItemOnGUI += OnItemGUI; } #region ReplaceReference static internal void ReplaceReference(Object root, Object from, Object to) { ReplaceReference(EditorUtility.CollectDeepHierarchy(new [] { root }), from, to); } static internal void ReplaceReference(Object[] roots, Object from, Object to) { var dict = new Dictionary<Object, Object>(); dict.Add(from, to); ReplaceReference(roots, dict); } static internal void ReplaceReference(Object root, IDictionary<Object, Object> dict) { ReplaceReference(EditorUtility.CollectDeepHierarchy(new [] { root }), dict); } static internal void ReplaceReference(Object[] roots, IDictionary<Object, Object> dict) { foreach (var obj in roots) { if (obj == null) continue; var so = new SerializedObject(obj); var property = so.GetIterator(); while (property.Next(true)) { if (property.propertyType != SerializedPropertyType.ObjectReference) continue; if (PropertyFilter(property)) continue; if (property.objectReferenceValue == null) continue; Object result; if (!dict.TryGetValue(property.objectReferenceValue, out result)) continue; property.objectReferenceValue = result; property.serializedObject.ApplyModifiedProperties(); } } } #endregion #region etc static public void Foreach<T>(this IEnumerable<T> targets, System.Action<T> action) { foreach (var t in targets) { action(t); } } static internal void SetParentAndSaveLocalTransform(Transform child, Transform parent) { var lp = child.localPosition; var lr = child.localRotation; var ls = child.localScale; #if UNITY_4_6 || UNITY_5_0 || UNITY_5 Vector2 sizeDelta = Vector2.zero; var rectTransform = child as RectTransform; if (rectTransform) sizeDelta = rectTransform.sizeDelta; child.parent = parent; if (rectTransform) rectTransform.sizeDelta = sizeDelta; #else child.parent = parent; #endif child.localPosition = lp; child.localRotation = lr; child.localScale = ls; } static internal IEnumerable<PEPrefabScript> GetNestedInstances(GameObject gameObject) { var instances = gameObject.GetComponentsInChildren<PEPrefabScript>(true); var rootInstance = gameObject.GetComponent<PEPrefabScript>(); foreach (var instance in instances) { if (instance == rootInstance) continue; var parentInstance = instance.gameObject.GetInParent<PEPrefabScript>(); if (parentInstance == null) { yield return instance; } else if (parentInstance == rootInstance) { if (rootInstance.ParentPrefab == null) yield return instance; else if (rootInstance.ParentPrefab.GetComponent<PEPrefabScript>().Links[parentInstance.Links[instance]] == null) yield return instance; } else { if (parentInstance.IsNonPrefabObject(instance)) yield return instance; } } } private static T GetInParent<T>(this GameObject obj) where T : MonoBehaviour { var item = obj.transform.parent; while (item != null) { var result = item.GetComponent<T>(); if (result != null) return result; item = item.parent; } return null; } static internal void ExecuteOnEditorUpdate(this IEnumerator enumerator) { EditorApplication.CallbackFunction self = null; EditorApplication.CallbackFunction func = () => { if (!enumerator.MoveNext()) EditorApplication.update -= self; }; self = func; EditorApplication.update += func; } #endregion #region GUI static internal GUIStyle emptyStyle = new 
GUIStyle(); static void OnItemGUI(int instanceID, Rect rect) { var instance = EditorUtility.InstanceIDToObject(instanceID) as GameObject; if (instance == null) return; var prefabInstance = instance.GetComponent<PEPrefabScript>(); var isPrefab = PrefabUtility.GetPrefabParent(instance) && PrefabUtility.FindPrefabRoot(instance) == instance; if (prefabInstance) { bool rootPrefab = PrefabUtility.GetPrefabParent(prefabInstance.gameObject) == prefabInstance.Prefab; var color = GUI.color; GUI.color = rootPrefab ? Color.green : (Color.yellow); if (!prefabInstance.enabled) GUI.color = Color.white; if (prefabInstance.Prefab == null) GUI.color = Color.red; const int width = 15; var br = rect; br.height -= 2; br.y += 2 / 2; br.x += br.width - width; br.width = width; var content = new GUIContent(PEResources.icon, prefabInstance.Prefab ? prefabInstance.Prefab.name : "Missiog prefab with guid: " + prefabInstance.PrefabGUID); var click = GUI.Button(br, content, emptyStyle); GUI.color = color; var evt = Event.current; if (prefabInstance.Prefab && (evt.type == EventType.ContextClick || click || evt.type == EventType.MouseUp)) { var mousePos = evt.mousePosition; if (br.Contains(mousePos)) { var menu = new GenericMenu(); BuildMenu(menu, prefabInstance, rootPrefab); menu.ShowAsContext(); evt.Use(); } } } else if (isPrefab) { var click = PEPrefs.AutoPrefabs; if (click) MakeNested(instance); } } static private LinkedList<GameObject> buildMenuRecursionList = new LinkedList<GameObject>(); static internal void BuildMenu(GenericMenu menu, PEPrefabScript prefabInstance, bool rootPrefab, string path = "", bool showParent = true, bool showInstances = true) { if (buildMenuRecursionList.Contains(prefabInstance.Prefab)) { buildMenuRecursionList.AddLast(prefabInstance.Prefab); var prefabsArray = buildMenuRecursionList.Select(p => AssetDatabase.GetAssetPath(p)).ToArray(); buildMenuRecursionList.Clear(); throw new System.Exception("Prefab recursion detected:\n" + string.Join("\n", prefabsArray)); } buildMenuRecursionList.AddLast(prefabInstance.Prefab); if (prefabInstance.ParentPrefab == null || !showParent) menu.AddItem(new GUIContent(path + prefabInstance.Prefab.name), false, () => { }); else { BuildMenu(menu, prefabInstance.ParentPrefab.GetComponent<PEPrefabScript>(), false, path + prefabInstance.Prefab.name + "/", true, false); menu.AddItem(new GUIContent(path + prefabInstance.Prefab.name), false, () => { }); } menu.AddSeparator(path + ""); var isPrefab = prefabInstance.gameObject == prefabInstance.Prefab.gameObject; menu.AddItem(new GUIContent(path + "Select"), false, SelectPrefab, prefabInstance); var prefabType = PrefabUtility.GetPrefabType(prefabInstance.gameObject); var canApply = rootPrefab && prefabType != PrefabType.ModelPrefab && prefabType != PrefabType.ModelPrefabInstance && prefabType != PrefabType.DisconnectedModelPrefabInstance; if (canApply) { menu.AddItem(new GUIContent(path + "Apply"), false, Apply, prefabInstance); } if (!AssetDatabase.Contains(prefabInstance) || !isPrefab) { menu.AddItem(new GUIContent(path + "Revert"), false, Revert, prefabInstance); if (prefabInstance.ParentPrefab != null) menu.AddItem(new GUIContent(path + "Revert To Parent"), false, RevertToParent, prefabInstance); } menu.AddSeparator(path + ""); menu.AddItem(new GUIContent(path + "Create Child"), false, CreateChild, prefabInstance); #if INJECTION if (prefabInstance.ParentPrefab != null) menu.AddItem(new GUIContent(path + "Insert Parent"), false, InjectParent, prefabInstance); #endif if (!rootPrefab && 
!AssetDatabase.Contains(prefabInstance)) { menu.AddSeparator(path); if (prefabInstance.enabled) menu.AddItem(new GUIContent(path + "Disable"), false, obj => (obj as PEPrefabScript).enabled = false, prefabInstance); else menu.AddItem(new GUIContent(path + "Enable"), false, obj => (obj as PEPrefabScript).enabled = true, prefabInstance); } menu.AddSeparator(path); if (prefabInstance.GetPrefabsWithInstances().Any()) menu.AddItem(new GUIContent(path + "Instances/Select All Instances"), false, SelectInstances, prefabInstance); if (showInstances) foreach (var prefab in prefabInstance.GetPrefabsWithInstances()) { if (prefab == null) continue; var pi = prefab.GetComponent<PEPrefabScript>(); var name = prefab.name; name = (pi != null && pi.ParentPrefab == prefabInstance.Prefab) ? "Child: " + name : "Contains in: " + name; if (pi != null) BuildMenu(menu, prefab.GetComponent<PEPrefabScript>(), false, path + "Instances/" + name + "/", false); var current = prefab; menu.AddItem(new GUIContent(path + "Instances/" + name), false, () => { Selection.activeObject = current; }); } menu.AddItem(new GUIContent(path + "Instantiate"), false, pi => Selection.activeObject = PrefabUtility.InstantiatePrefab(((PEPrefabScript)pi).Prefab), prefabInstance); if (!AssetDatabase.Contains(prefabInstance)) menu.AddItem(new GUIContent(path + "Replace"), false, Replace, prefabInstance); buildMenuRecursionList.Remove(prefabInstance.Prefab); } static internal void Replace(object prefabInstance) { SelectObjectRoutine(prefabInstance as PEPrefabScript).ExecuteOnEditorUpdate(); } static IEnumerator SelectObjectRoutine(PEPrefabScript prefabInstance) { EditorGUIUtility.ShowObjectPicker<GameObject>(null, false, "t:Prefab", 1); Object obj = null; while (EditorGUIUtility.GetObjectPickerControlID() == 1) { obj = EditorGUIUtility.GetObjectPickerObject(); yield return null; } if (obj != null && EditorUtility.DisplayDialog("Replace", string.Format("Do you want to replace {0} with {1}", prefabInstance.gameObject.name, obj.name), "Replace", "Cancel")) { prefabInstance.ReplaceInPlace(obj as GameObject, EditorUtility.DisplayDialog("Replace", "Apply modifications from current instance?", "Apply", "Don't apply"), false); } } static internal bool IsCousin(GameObject b, GameObject c) { if (b == null || c == null) return false; var bi = b.GetComponent<PEPrefabScript>(); var ci = c.GetComponent<PEPrefabScript>(); return bi != null && ci != null && (ci.ParentPrefab == bi.Prefab || IsCousin(b, ci.ParentPrefab)); } static internal void MakeNested(GameObject instance) { var parent = PrefabUtility.GetPrefabParent(instance); var prefab = parent == null ? 
instance : parent as GameObject; if (prefab.GetComponent<EvolvePrefab>() != null) return; var pi = (PEPrefabScript)prefab.AddComponent<EvolvePrefab>(); pi.Prefab = prefab; pi.BuildLinks(); } static void SelectInstances(object obj) { Selection.objects = ((PEPrefabScript)obj).GetPrefabsWithInstances().ToArray(); } static void Apply(object obj) { DoApply((PEPrefabScript)obj); } static internal void DoApply(PEPrefabScript script) { if (PEPrefs.DebugLevel > 0) Debug.Log("DoApply Start"); script.ApplyChanges(true); if (PEPrefs.DebugLevel > 0) Debug.Log("DoApply Completed"); DoAutoSave(); } static internal void DoAutoSave() { EditorUtility.ClearProgressBar(); if (PEPrefs.AutoSaveAfterApply) EditorApplication.delayCall += AssetDatabase.SaveAssets; } static internal void RevertToParent(object obj) { ((PEPrefabScript)obj).RevertToParent(); } static internal void Revert(object obj) { ((PEPrefabScript)obj).Revert(); } static internal void CreateChild(object obj) { var pi = (PEPrefabScript)obj; var path = AssetDatabase.GenerateUniqueAssetPath(System.IO.Path.ChangeExtension(AssetDatabase.GUIDToAssetPath(pi.PrefabGUID), null) + "_Child.prefab"); var go = PrefabUtility.CreatePrefab(path, pi.gameObject, ReplacePrefabOptions.Default); var prefabInstance = go.GetComponent<PEPrefabScript>(); prefabInstance.Prefab = go; prefabInstance.ParentPrefab = pi.Prefab; prefabInstance.ParentPrefabGUID = pi.PrefabGUID; prefabInstance.BuildModifications(); Selection.activeObject = PrefabUtility.InstantiatePrefab(go); AssetDatabase.ImportAsset(path); PECache.Instance.CheckPrefab(path); } static internal void InjectChild(PEPrefabScript obj, GameObject[] children) { var path = AssetDatabase.GenerateUniqueAssetPath(System.IO.Path.ChangeExtension(AssetDatabase.GUIDToAssetPath(obj.PrefabGUID), null) + "_Child_Injected.prefab"); var go = PrefabUtility.CreatePrefab(path, obj.gameObject, ReplacePrefabOptions.Default); var prefabInstance = go.GetComponent<PEPrefabScript>(); prefabInstance.Prefab = go; prefabInstance.ParentPrefabGUID = obj.PrefabGUID; prefabInstance.BuildModifications(); Selection.activeObject = PrefabUtility.InstantiatePrefab(go); AssetDatabase.ImportAsset(path); PECache.Instance.CheckPrefab(path); foreach (var child in children) { child.GetComponent<PEPrefabScript>().ParentPrefabGUID = AssetDatabase.AssetPathToGUID(path); PECache.Instance.CheckPrefab(AssetDatabase.GUIDToAssetPath(child.GetComponent<PEPrefabScript>().PrefabGUID)); } } static internal void InjectParent(object obj) { var pi = (PEPrefabScript)obj; var path = AssetDatabase.GenerateUniqueAssetPath(System.IO.Path.ChangeExtension(AssetDatabase.GUIDToAssetPath(pi.PrefabGUID), null) + "_Parent_Inserted.prefab"); var go = PrefabUtility.CreatePrefab(path, pi.gameObject, ReplacePrefabOptions.Default); var prefabInstance = go.GetComponent<PEPrefabScript>(); prefabInstance.Prefab = go; prefabInstance.ParentPrefabGUID = pi.ParentPrefabGUID; prefabInstance.BuildModifications(); Selection.activeObject = PrefabUtility.InstantiatePrefab(go); AssetDatabase.ImportAsset(path); PECache.Instance.CheckPrefab(path); pi.ParentPrefabGUID = prefabInstance.PrefabGUID; PECache.Instance.CheckPrefab(AssetDatabase.GUIDToAssetPath(pi.PrefabGUID)); } static void SelectPrefab(object obj) { Selection.activeObject = ((PEPrefabScript)obj).Prefab; } static internal IEnumerable<GameObject> GetPrefabsWithInstance(string guid) { return PECache.Instance.GetPrefabsWithInstances(guid); } #endregion #region Assets static public void ApplyPrefab(GameObject gameObject) { gameObject = 
PrefabUtility.FindPrefabRoot(gameObject); var pi = gameObject.GetComponent<PEPrefabScript>(); if (pi == null) { DefaultApply(gameObject); DoAutoSave(); } else DoApply(pi); } static public void ApplyPrefab(GameObject[] targets) { var list = new List<GameObject>(); foreach (var target in targets) { var root = PrefabUtility.FindPrefabRoot(target); list.RemoveAll(r => r == root); list.Add(root); } foreach (GameObject target in list) ApplyPrefab(target); } static void DefaultApply(GameObject obj) { foreach (var pi in obj.GetComponentsInChildren<PEPrefabScript>(true)) pi.BuildModifications(); var gameObject = obj; var prefabType = PrefabUtility.GetPrefabType(gameObject); if (prefabType == PrefabType.PrefabInstance || prefabType == PrefabType.DisconnectedPrefabInstance) { var gameObject2 = PrefabUtility.FindValidUploadPrefabInstanceRoot(gameObject); if (gameObject2 == null) return; var prefabParent = PrefabUtility.GetPrefabParent(gameObject2); var assetPath = AssetDatabase.GetAssetPath(prefabParent); var method = typeof(Provider).GetMethod("PromptAndCheckoutIfNeeded", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static | BindingFlags.Instance); var canReplace = (bool)method.Invoke(null, new object[] { new [] { assetPath }, "The version control requires you to checkout the prefab before applying changes." }); if (canReplace) { PrefabUtility.ReplacePrefab(gameObject2, prefabParent, ReplacePrefabOptions.ConnectToPrefab); } } } static internal T GetAssetByGUID<T>(string GUID) where T : Object { return GetAssetByPath<T>(AssetDatabase.GUIDToAssetPath(GUID)); } static internal string GetAssetGUID(Object asset) { return AssetDatabase.AssetPathToGUID(AssetDatabase.GetAssetPath(asset)); } static internal T GetAssetByPath<T>(string assetPath) where T : Object { return AssetDatabase.LoadAssetAtPath(assetPath, typeof(T)) as T; } #endregion static internal T GetInstance<T>(this SerializedProperty property) { return (T)GetInstance(property); } static internal object GetInstance(object obj, string path) { path = path.Replace(".Array.data", ""); var split = path.Split('.'); var stack = split; object v = obj; try { foreach (var name in stack) { if (name.Contains("[")) { var n = name.Split('[', ']'); v = GetField(v, n[0], int.Parse(n[1])); } else v = GetField(v, name); } } catch (System.Exception e) { Debug.LogException(e); return null; } return v; } static internal object GetInstance(this SerializedProperty property) { var obj = property.serializedObject.targetObject; var path = property.propertyPath; return GetInstance(obj, path); } private static object GetField(object obj, string field, int index = -1) { try { var obj2 = obj.GetType().GetField(field).GetValue(obj); return index == -1 ? 
obj2 : (obj2 as IList)[index]; } catch (System.Exception) { return null; } } #region Serialization static internal bool PropertyFilter(SerializedProperty property) { return property.propertyPath.Contains("m_Prefab") || property.propertyPath.Contains("m_FileID") || property.propertyPath.Contains("m_PathID") || property.propertyPath.Contains("m_ObjectHideFlags") || property.propertyPath.Contains("m_Children") || property.propertyPath.Contains("m_Father") || property.propertyPath.Contains("m_GameObject") || property.propertyPath.Contains("m_Component"); } static internal object GetPropertyValue(this SerializedProperty prop) { switch (prop.propertyType) { case SerializedPropertyType.Integer: return prop.intValue; case SerializedPropertyType.Boolean: return prop.boolValue; case SerializedPropertyType.Float: return prop.floatValue; case SerializedPropertyType.String: return prop.stringValue; case SerializedPropertyType.Color: return prop.colorValue; case SerializedPropertyType.ObjectReference: return prop.objectReferenceValue; case SerializedPropertyType.LayerMask: return prop.intValue; case SerializedPropertyType.Enum: return prop.enumValueIndex; case SerializedPropertyType.Vector2: return prop.vector2Value; case SerializedPropertyType.Vector3: return prop.vector3Value; case SerializedPropertyType.Quaternion: return prop.quaternionValue; case SerializedPropertyType.Rect: return prop.rectValue; case SerializedPropertyType.ArraySize: return prop.intValue; case SerializedPropertyType.Character: return prop.intValue; case SerializedPropertyType.AnimationCurve: return prop.animationCurveValue; case SerializedPropertyType.Bounds: return prop.boundsValue; case SerializedPropertyType.Gradient: break; } return null; } static internal void SetPropertyValue(this SerializedProperty prop, object value) { if(SetInternalPropertyValue(prop, value)) return; switch (prop.propertyType) { case SerializedPropertyType.Integer: prop.intValue = (int)value; break; case SerializedPropertyType.Boolean: prop.boolValue = (bool)value; break; case SerializedPropertyType.Float: prop.floatValue = (float)value; break; case SerializedPropertyType.String: prop.stringValue = (string)value; break; case SerializedPropertyType.Color: prop.colorValue = (Color)value; break; case SerializedPropertyType.ObjectReference: prop.objectReferenceValue = (Object)value; break; case SerializedPropertyType.LayerMask: prop.intValue = (int)value; break; case SerializedPropertyType.Enum: prop.enumValueIndex = (int)value; break; case SerializedPropertyType.Vector2: prop.vector2Value = (Vector2)value; break; case SerializedPropertyType.Vector3: prop.vector3Value = (Vector3)value; break; case SerializedPropertyType.Quaternion: prop.quaternionValue = (Quaternion)value; break; case SerializedPropertyType.Rect: prop.rectValue = (Rect)value; break; case SerializedPropertyType.ArraySize: prop.intValue = (int)value; break; case SerializedPropertyType.Character: prop.intValue = (int)value; break; case SerializedPropertyType.AnimationCurve: prop.animationCurveValue = (AnimationCurve)value; break; case SerializedPropertyType.Bounds: prop.boundsValue = (Bounds)value; break; case SerializedPropertyType.Gradient: break; } } static private bool SetInternalPropertyValue(this SerializedProperty prop, object value) { var targetTransform = prop.serializedObject.targetObject as Transform; if (targetTransform != null) { switch (prop.propertyPath) { case "m_RootOrder": targetTransform.SetSiblingIndex((int)value); return false; case "m_Father": 
SetParentAndSaveLocalTransform(targetTransform, (Transform)value); return true; default: return false; } } return false; } static internal void CopyPrefabInternalData(Object src, Object dest) { if (src == null || dest == null) { if (PEPrefs.DebugLevel > 0) Debug.LogError(string.Format("Failed to copy internal prefab data src:{0} dst:{1}", src, dest)); return; } var destSO = new SerializedObject(dest); var srcSO = new SerializedObject(src); destSO.FindProperty("m_PrefabParentObject").SetPropertyValue(srcSO.FindProperty("m_PrefabParentObject").GetPropertyValue()); destSO.FindProperty("m_PrefabInternal").SetPropertyValue(srcSO.FindProperty("m_PrefabInternal").GetPropertyValue()); destSO.ApplyModifiedProperties(); } static internal bool Compare(AnimationCurve c0, AnimationCurve c1) { if (c0 == null && c1 != null || c0 != null && c1 == null) return false; if (c0 == c1) return true; if (c0.postWrapMode != c1.postWrapMode) return false; if (c0.preWrapMode != c1.preWrapMode) return false; if (c0.keys == null && c1.keys != null) return false; if (c0.keys != null && c1.keys == null) return false; if (c0.keys != null && c1.keys != null) { if (c0.keys.Length != c1.keys.Length) return false; return !c0.keys.Where((t, i) => !Compare(t, c1.keys[i])).Any(); } return true; } static internal bool Compare(Keyframe c0, Keyframe c1) { return c0.inTangent == c1.inTangent && c0.outTangent == c1.outTangent && c0.tangentMode == c1.tangentMode && c0.time == c1.time && c0.value == c1.value; } static internal Component CopyComponentTo(this Component component, GameObject gameObject) { if (component == null) { if (PEPrefs.DebugLevel > 0) Debug.Log("Trying to copy null component"); return null; } if (PEPrefs.DebugLevel > 0) Debug.Log(string.Format("Add component {0} to {1}", component, gameObject)); var newComponent = gameObject.AddComponent(component.GetType()); if (!newComponent) { if (PEPrefs.DebugLevel > 0) Debug.LogWarning(string.Format("Failed to copy component of type {0}", component.GetType())); return null; } EditorUtility.CopySerialized(component, newComponent); return newComponent; } #endregion } }
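// A minimal standalone sketch of the property-path walking performed by
// GetInstance(object, string) above: strip ".Array.data", split on '.', and
// treat "name[i]" segments as indexed field access via reflection. The
// Inventory/Weapon types below are hypothetical and exist only for this
// illustration; the real code operates on Unity's SerializedProperty paths.
using System;
using System.Collections;
using System.Reflection;

class PropertyPathDemo
{
    class Weapon { public string name = "sword"; }
    class Inventory { public Weapon[] items = { new Weapon(), new Weapon { name = "bow" } }; }

    static object Resolve(object root, string path)
    {
        path = path.Replace(".Array.data", "");
        object current = root;
        foreach (string segment in path.Split('.'))
        {
            if (segment.Contains("["))
            {
                // "items[1]" -> field "items", element 1
                string[] parts = segment.Split('[', ']');
                FieldInfo field = current.GetType().GetField(parts[0]);
                current = ((IList)field.GetValue(current))[int.Parse(parts[1])];
            }
            else
            {
                current = current.GetType().GetField(segment).GetValue(current);
            }
        }
        return current;
    }

    static void Main()
    {
        // A SerializedProperty-style path of "items.Array.data[1].name" resolves to "bow".
        Console.WriteLine(Resolve(new Inventory(), "items.Array.data[1].name"));
    }
}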
//--------------------------------------------------------------------------- // // <copyright file=TextTreeTextBlock.cs company=Microsoft> // Copyright (C) Microsoft Corporation. All rights reserved. // </copyright> // // // Description: A block of text stored in a TextContainer. // // History: // 2/18/2004 : [....] - Created // //--------------------------------------------------------------------------- using System; using MS.Internal; namespace System.Windows.Documents { // Each TextContainer maintains an array of TextTreeTextBlocks that holds all // the raw text in the tree. // // TextTreeTextBlocks are simple char arrays with some extra state that // tracks current char count vs capacity. Instead of simply storing // text at the head of the array, we use the "buffer gap" algorithm. // Free space in the array always follows the last insertion, so with // sequential writes we don't need to memmove any existing text. internal class TextTreeTextBlock : SplayTreeNode { //------------------------------------------------------ // // Constructors // //------------------------------------------------------ #region Constructors // Create a new TextTreeTextBlock instance. internal TextTreeTextBlock(int size) { Invariant.Assert(size > 0); Invariant.Assert(size <= MaxBlockSize); _text = new char[size]; _gapSize = size; } #endregion Constructors //------------------------------------------------------ // // Public Methods // //------------------------------------------------------ #region Public Methods #if DEBUG // Debug-only ToString override. public override string ToString() { return ("TextTreeTextBlock Id=" + this.DebugId + " Count=" + this.Count); } #endif // DEBUG #endregion Public Methods //------------------------------------------------------ // // Internal Methods // //------------------------------------------------------ #region Internal Methods // Inserts text into the block, up to the remaining block capacity. // Returns the number of chars actually inserted. internal int InsertText(int logicalOffset, object text, int textStartIndex, int textEndIndex) { int count; string textString; char[] textChars; char[] newText; int rightOfGapLength; Invariant.Assert(text is string || text is char[], "Bad text parameter!"); Invariant.Assert(textStartIndex <= textEndIndex, "Bad start/end index!"); // Splay this node so we don't invalidate any LeftSymbolCounts. Splay(); count = textEndIndex - textStartIndex; if (_text.Length < MaxBlockSize && count > _gapSize) { // We need to grow this block. // We're very conservative here, allocating no more than the // caller asks for. Once we push past the MaxBlockSize, we'll // be more aggressive with allocations. newText = new char[Math.Min(this.Count + count, MaxBlockSize)]; Array.Copy(_text, 0, newText, 0, _gapOffset); rightOfGapLength = _text.Length - (_gapOffset + _gapSize); Array.Copy(_text, _gapOffset + _gapSize, newText, newText.Length - rightOfGapLength, rightOfGapLength); _gapSize += newText.Length - _text.Length; _text = newText; } // Move the gap to the insert point. if (logicalOffset != _gapOffset) { MoveGap(logicalOffset); } // Truncate the copy. count = Math.Min(count, _gapSize); textString = text as string; if (textString != null) { // Do the work. textString.CopyTo(textStartIndex, _text, logicalOffset, count); } else { textChars = (char[])text; // Do the work. Array.Copy(textChars, textStartIndex, _text, logicalOffset, count); } // Update the gap. 
_gapOffset += count; _gapSize -= count; return count; } // Splits this block at the current gap offset. // Only called during a text insert, when the block is full. // If GapOffset < TextTreeTextBlock.MaxBlockSize / 2, returns // a new block with the left text, otherwise returns a new // block with the right text. internal TextTreeTextBlock SplitBlock() { TextTreeTextBlock newBlock; bool insertBefore; Invariant.Assert(_gapSize == 0, "Splitting non-full block!"); Invariant.Assert(_text.Length == MaxBlockSize, "Splitting non-max sized block!"); newBlock = new TextTreeTextBlock(MaxBlockSize); if (_gapOffset < MaxBlockSize / 2) { // Copy the left text over to the new block. Array.Copy(_text, 0, newBlock._text, 0, _gapOffset); newBlock._gapOffset = _gapOffset; newBlock._gapSize = MaxBlockSize - _gapOffset; // Remove the left text from this block. _gapSize += _gapOffset; _gapOffset = 0; // New node preceeds this one. insertBefore = true; } else { // Copy the right text over to the new block. Array.Copy(_text, _gapOffset, newBlock._text, _gapOffset, MaxBlockSize - _gapOffset); Invariant.Assert(newBlock._gapOffset == 0); newBlock._gapSize = _gapOffset; // Remove the left text from this block. _gapSize = MaxBlockSize - _gapOffset; // New node follows this one. insertBefore = false; } // Add the new node to the splay tree. newBlock.InsertAtNode(this, insertBefore); return newBlock; } // Removes text at a logical offset (an offset that does not // consider the gap). internal void RemoveText(int logicalOffset, int count) { int precedingTextToRemoveCount; Invariant.Assert(logicalOffset >= 0); Invariant.Assert(count >= 0); Invariant.Assert(logicalOffset + count <= this.Count, "Removing too much text!"); int originalCountToRemove = count; int originalCount = this.Count; // Splay this node so we don't invalidate any LeftSymbolCounts. Splay(); // // Remove text before the gap. if (logicalOffset < _gapOffset) { if (logicalOffset + count < _gapOffset) { // Shift text over. MoveGap(logicalOffset + count); } // Extend the gap to "remove" the text. precedingTextToRemoveCount = (logicalOffset + count == _gapOffset) ? count : _gapOffset - logicalOffset; _gapOffset -= precedingTextToRemoveCount; _gapSize += precedingTextToRemoveCount; // Adjust logicalOffset, count, so that they follow the gap below. logicalOffset = _gapOffset; count -= precedingTextToRemoveCount; } // Make offset relative to text after the gap. logicalOffset += _gapSize; // Remove text after the gap. if (logicalOffset > _gapOffset + _gapSize) { // Shift text over. MoveGap(logicalOffset - _gapSize); } // Extend the gap to "remove" the text. _gapSize += count; Invariant.Assert(_gapOffset + _gapSize <= _text.Length); Invariant.Assert(originalCount == this.Count + originalCountToRemove); } // Copies text into a char array, returns the actual count of chars copied, // which may be smaller than count if the end of block is encountered. internal int ReadText(int logicalOffset, int count, char []chars, int charsStartIndex) { int copyCount; int originalCount; originalCount = count; // Read text before the gap. if (logicalOffset < _gapOffset) { copyCount = Math.Min(count, _gapOffset - logicalOffset); Array.Copy(_text, logicalOffset, chars, charsStartIndex, copyCount); count -= copyCount; charsStartIndex += copyCount; // Adjust logicalOffset, so that it will follow the gap below. logicalOffset = _gapOffset; } if (count > 0) { // Make offset relative to text after the gap. logicalOffset += _gapSize; // Read the text following the gap. 
copyCount = Math.Min(count, _text.Length - logicalOffset); Array.Copy(_text, logicalOffset, chars, charsStartIndex, copyCount); count -= copyCount; } return (originalCount - count); } #endregion Internal methods //------------------------------------------------------ // // Internal Properties // //------------------------------------------------------ #region Internal Properties // If this node is a local root, then ParentNode contains it. // Otherwise, this is the node parenting this node within its tree. internal override SplayTreeNode ParentNode { get { return _parentNode; } set { _parentNode = value; } } // TextTreeTextNode never has contained nodes. internal override SplayTreeNode ContainedNode { get { return null; } set { Invariant.Assert(false, "Can't set ContainedNode on a TextTreeTextBlock!"); } } // Count of symbols of all siblings preceding this node. internal override int LeftSymbolCount { get { return _leftSymbolCount; } set { _leftSymbolCount = value; } } // Count of unicode chars of all siblings preceding this node. // This property is only used by TextTreeNodes. internal override int LeftCharCount { get { return 0; } set { Invariant.Assert(value == 0); } } // Left child node in a sibling tree. internal override SplayTreeNode LeftChildNode { get { return _leftChildNode; } set { _leftChildNode = (TextTreeTextBlock)value; } } // Right child node in a sibling tree. internal override SplayTreeNode RightChildNode { get { return _rightChildNode; } set { _rightChildNode = (TextTreeTextBlock)value; } } // Unused by this derived class. internal override uint Generation { get { return 0; } set { Invariant.Assert(false, "TextTreeTextBlock does not track Generation!"); } } // Cached symbol offset. // Unused by this derived class. internal override int SymbolOffsetCache { get { return -1; } set { Invariant.Assert(false, "TextTreeTextBlock does not track SymbolOffsetCache!"); } } // Count of symbols covered by this node. internal override int SymbolCount { get { return this.Count; } set { Invariant.Assert(false, "Can't set SymbolCount on TextTreeTextBlock!"); } } // Count of unicode chars covered by this node and any contained nodes. // This property is only used by TextTreeNodes. internal override int IMECharCount { get { return 0; } set { Invariant.Assert(value == 0); } } // The number of chars stored in this block. internal int Count { get { return (_text.Length - _gapSize); } } // The number of additional chars this Block could accept before running out of space. internal int FreeCapacity { get { return _gapSize; } } // The offset of the gap in this block. internal int GapOffset { get { return _gapOffset; } } #endregion Internal Properties //------------------------------------------------------ // // Private Methods // //------------------------------------------------------ #region Private Methods // Repositions the gap to a new offset, shifting text as necessary. 
private void MoveGap(int offset) { int sourceOffset; int destinationOffset; int count; if (offset < _gapOffset) { sourceOffset = offset; destinationOffset = offset + _gapSize; count = _gapOffset - offset; } else { sourceOffset = _gapOffset + _gapSize; destinationOffset = _gapOffset; count = offset - _gapOffset; } Array.Copy(_text, sourceOffset, _text, destinationOffset, count); _gapOffset = offset; } #endregion Private methods //------------------------------------------------------ // // Private Fields // //------------------------------------------------------ #region Private Fields // Count of symbols of all siblings preceding this node. private int _leftSymbolCount; // The TextTreeTextBlock parenting this node within its tree, // or the TextTreeRootTextBlock containing this node. private SplayTreeNode _parentNode; // Left child node in a sibling tree. private TextTreeTextBlock _leftChildNode; // Right child node in a sibling tree. private TextTreeTextBlock _rightChildNode; // An array of text in the block. private char []_text; // Position of the buffer gap. private int _gapOffset; // Size of the buffer gap. private int _gapSize; // Max block size, in chars. internal const int MaxBlockSize = 4096; #endregion Private Fields } }
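// A self-contained toy sketch of the buffer-gap technique described in the
// comments at the top of this file: the free space (the "gap") always follows
// the last edit point, so sequential inserts need no shifting of existing text.
// This is illustrative only and is not the TextTreeTextBlock implementation.
using System;

class GapBufferDemo
{
    private char[] _text = new char[16];
    private int _gapOffset;        // where the gap starts
    private int _gapSize = 16;     // how many free chars the gap holds

    public void Insert(int offset, string s)
    {
        MoveGap(offset);                         // park the gap at the edit point
        int count = Math.Min(s.Length, _gapSize);
        s.CopyTo(0, _text, _gapOffset, count);   // write directly into the gap
        _gapOffset += count;                     // gap shrinks from its left edge
        _gapSize -= count;
    }

    // Same shifting rules as TextTreeTextBlock.MoveGap above.
    private void MoveGap(int offset)
    {
        if (offset < _gapOffset)
            Array.Copy(_text, offset, _text, offset + _gapSize, _gapOffset - offset);
        else
            Array.Copy(_text, _gapOffset + _gapSize, _text, _gapOffset, offset - _gapOffset);
        _gapOffset = offset;
    }

    public override string ToString() =>
        new string(_text, 0, _gapOffset) +
        new string(_text, _gapOffset + _gapSize, _text.Length - _gapOffset - _gapSize);

    static void Main()
    {
        var buffer = new GapBufferDemo();
        buffer.Insert(0, "hello world");
        buffer.Insert(5, ",");              // one gap move, then a single char write
        Console.WriteLine(buffer);          // prints "hello, world"
    }
}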
using System; using System.Collections.Generic; using System.Text; using System.Text.RegularExpressions; using Exortech.NetReflector; using ThoughtWorks.CruiseControl.Core; using ThoughtWorks.CruiseControl.Remote; using VersionOne.SDK.ObjectModel; using VersionOne.SDK.ObjectModel.Filters; /** * VersionOne would like to thank Andreas Axelsson for his * contribution to improving this component. */ namespace VersionOne.Integration.CCNet { [ReflectorType("versionone")] public class VersionOnePublisher : ITask { private string url; [ReflectorProperty("url")] public string Url { get { return url.EndsWith("/") ? url : url + "/"; } set { url = value; } } private string username; [ReflectorProperty("username", Required = false)] public string Username { get { return username; } set { username = value; } } private string password; [ReflectorProperty("password", Required = false)] public string Password { get { return password; } set { password = value; } } private bool integrated; [ReflectorProperty("integratedauth", Required = false)] public bool Integrated { get { return integrated; } set { integrated = value; } } private bool useProxy; [ReflectorProperty("useproxy", Required = false)] public bool UseProxy { get { return useProxy; } set { useProxy = value; } } private string proxyUrl; [ReflectorProperty("proxyurl", Required = false)] public string ProxyUrl { get { return proxyUrl; } set { proxyUrl = value; } } private string proxyUsername; [ReflectorProperty("proxyusername", Required = false)] public string ProxyUsername { get { return proxyUsername; } set { proxyUsername = value; } } private string proxyPassword; [ReflectorProperty("proxypassword", Required = false)] public string ProxyPassword { get { return proxyPassword; } set { proxyPassword = value; } } private string proxyDomain; [ReflectorProperty("proxydomain", Required = false)] public string ProxyDomain { get { return proxyDomain; } set { proxyDomain = value; } } private string ccWebRoot; [ReflectorProperty("webroot", Required = false)] public string CcWebRoot { get { return ccWebRoot; } set { ccWebRoot = value; } } private string referenceExpression; /// <summary> /// Pattern used to match the VersionOne field on Workitems to check-in comments. /// </summary> [ReflectorProperty("referenceexpression", Required = false)] public string ReferenceExpression { get { return referenceExpression; } set { referenceExpression = value; } } private string referenceField; /// <summary> /// The name of the VersionOne field used to associate check-in comments with Workitems. /// </summary> [ReflectorProperty("referencefield", Required = false)] public string ReferenceField { get { return referenceField; } set { referenceField = value; } } private string ccBuildServer; [ReflectorProperty("server", Required = false)] public string CcBuildServer { get { return ccBuildServer; } set { ccBuildServer = value; } } private V1Instance instance; private V1Instance Instance { get { return instance ??
(instance = CreateV1Instance()); } } private V1Instance CreateV1Instance() { ProxySettings proxy = GetProxy(); V1Instance newV1 = new V1Instance(Url, Username, Password, Integrated, proxy); newV1.Validate(); return newV1; } private ProxySettings GetProxy() { if(!UseProxy) { return null; } Uri uri = new Uri(ProxyUrl); return new ProxySettings(uri, ProxyUsername, ProxyPassword, ProxyDomain); } public void Run(IIntegrationResult result) { BuildProject buildProject = GetBuildProject(result); if(buildProject != null) { IEnumerable<ChangeInfo> changes = ResolveChanges(result.Modifications); BuildRun run = buildProject.CreateBuildRun(result.ProjectName + " - " + result.Label, GetBuildDate(result)); run.Elapsed = GetElapsed(result); run.Reference = result.Label; run.Source.CurrentValue = DetermineSource(result); run.Status.CurrentValue = DetermineStatus(result); run.Description = GetModificationDescription(changes); run.Save(); string str = CreateBuildUrl(result); if (!string.IsNullOrEmpty(str)) { run.CreateLink("Build Report", str, true); } SetChangeSets(run, changes); } } /// <summary> /// Cull out a distinct set of changes from the CCNet Modifications collection. /// </summary> /// <param name="modifications">The CCNet Modifications collection.</param> /// <returns>A collection of ChangeInfo.</returns> private static IEnumerable<ChangeInfo> ResolveChanges(IEnumerable<Modification> modifications) { IDictionary<string, ChangeInfo> changes = new Dictionary<string, ChangeInfo>(); foreach (Modification mod in modifications) { if (!changes.ContainsKey(mod.ChangeNumber)) { changes.Add(mod.ChangeNumber, new ChangeInfo(mod)); } } return changes.Values; } private class ChangeInfo { public readonly string Number; public readonly string Comment; public readonly string User; public readonly DateTime Stamp; public ChangeInfo(Modification mod) { Number = mod.ChangeNumber; Comment = mod.Comment; User = mod.UserName; Stamp = mod.ModifiedTime; } } /// <summary> /// Associate this BuildRun with one or more ChangeSets. If there is no ChangeSet, then create one. /// </summary> /// <param name="run">This BuildRun.</param> /// <param name="changes">The changes as reported by CCNet.</param> private void SetChangeSets(BuildRun run, IEnumerable<ChangeInfo> changes) { foreach(ChangeInfo change in changes) { // See if we have this ChangeSet in the system. ChangeSetFilter filter = new ChangeSetFilter(); filter.Reference.Add(change.Number.ToString()); ICollection<ChangeSet> changeSets = Instance.Get.ChangeSets(filter); if(changeSets.Count == 0) { // We don't have one yet. Create one. string name = string.Format("{0} on {1}", change.User, change.Stamp); ChangeSet changeSet = Instance.Create.ChangeSet(name, change.Number.ToString()); changeSet.Description = change.Comment; changeSets = new List<ChangeSet>(); changeSets.Add(changeSet); Trace("Created new ChangeSet: {0}", name); } IEnumerable<PrimaryWorkitem> workitems = DetermineWorkitems(change); // Associate with our new BuildRun. 
foreach(ChangeSet changeSet in changeSets) { run.ChangeSets.Add(changeSet); foreach(PrimaryWorkitem workitem in workitems) { changeSet.PrimaryWorkitems.Add(workitem); //workitem.CompletedIn.Clear(); List<BuildRun> toRemove = new List<BuildRun>(); foreach (BuildRun otherRun in workitem.CompletedIn) { if (otherRun.BuildProject == run.BuildProject) { toRemove.Add(otherRun); } } foreach (BuildRun buildRun in toRemove) { workitem.CompletedIn.Remove(buildRun); } workitem.CompletedIn.Add(run); Trace("Associated ChangeSet with PrimaryWorkitem: {0}", workitem.ID); } } } } /// <summary> /// Pull the workitem numbers out of the change comments. /// </summary> /// <param name="change">The CCNet change record.</param> /// <returns>A collection of affected PrimaryWorkitems.</returns> private IEnumerable<PrimaryWorkitem> DetermineWorkitems(ChangeInfo change) { List<PrimaryWorkitem> result = new List<PrimaryWorkitem>(); if(!string.IsNullOrEmpty(ReferenceExpression) && !string.IsNullOrEmpty(ReferenceField)) { Regex expression = new Regex(ReferenceExpression); foreach(Match match in expression.Matches(change.Comment)) result.AddRange(ResolveReference(match.Value)); } else { Trace("Either referenceexpression ({0}) or referencefield ({1}) not set in config file.", ReferenceExpression, ReferenceField); } return result; } /// <summary> /// Resolve a check-in comment identifier to a PrimaryWorkitem. /// </summary> /// <param name="reference">The identifier in the check-in comment.</param> /// <returns>A collection of matching PrimaryWorkitems.</returns> /// <remarks>If the reference matches a SecondaryWorkitem, we need to navigate to the parent.</remarks> private IEnumerable<PrimaryWorkitem> ResolveReference(string reference) { List<PrimaryWorkitem> result = new List<PrimaryWorkitem>(); WorkitemFilter filter = new WorkitemFilter(); filter.Find.SearchString = reference; filter.Find.Fields.Add(ReferenceField); IEnumerable<Workitem> workitems = Instance.Get.Workitems(filter); foreach(Workitem workitem in workitems) { if(workitem is PrimaryWorkitem) { result.Add((PrimaryWorkitem)workitem); } else if(workitem is SecondaryWorkitem) { result.Add((PrimaryWorkitem)((SecondaryWorkitem)workitem).Parent); } else { // Shut 'er down, Clancy, she's pumping mud. throw new ApplicationException(string.Format("Found unexpected Workitem type: {0}", workitem.GetType())); } } Trace("Associated {0} PrimaryWorkitem(s) with {1}.", result.Count, reference); return result; } private static string GetModificationDescription(IEnumerable<ChangeInfo> changes) { StringBuilder result = new StringBuilder(); foreach (ChangeInfo change in changes) { result.Append(string.Format("{0}: {1}<br>", change.User, change.Comment)); } return result.ToString(); } private static DateTime GetBuildDate(IIntegrationResult result) { return ((result.EndTime == DateTime.MinValue) ? DateTime.Now : result.EndTime); } private static double GetElapsed(IIntegrationResult result) { DateTime time = (result.EndTime == DateTime.MinValue) ? 
DateTime.Now : result.EndTime; TimeSpan span = time - result.StartTime; return span.TotalMilliseconds; } private static string DetermineStatus(IIntegrationResult result) { switch(result.Status) { case IntegrationStatus.Success: return "Passed"; case IntegrationStatus.Failure: return "Failed"; default: return null; } } private static string DetermineSource(IIntegrationResult result) { switch(result.BuildCondition) { case BuildCondition.ForceBuild: return "Forced"; case BuildCondition.IfModificationExists: return "Trigger"; default: return null; } } public string CreateBuildUrl(IIntegrationResult result) { if(CcWebRoot == null) { return null; } List<string> parts = new List<string>(); string buildTime = (string)result.IntegrationProperties["CCNetBuildTime"]; string buildDate = (string)result.IntegrationProperties["CCNetBuildDate"]; string buildProject = (string)result.IntegrationProperties["CCNetProject"]; string buildLabel = (string)result.IntegrationProperties["CCNetLabel"]; buildTime = buildTime.Replace(":", string.Empty); buildDate = buildDate.Replace("-", string.Empty); string file = "log" + buildDate + buildTime + (result.Succeeded ? "Lbuild." + buildLabel : string.Empty) + ".xml"; parts.Add(CcWebRoot); if (!CcWebRoot.EndsWith("/")) { parts.Add("/"); } parts.Add("server/"); parts.Add(String.IsNullOrEmpty(CcBuildServer) ? "local" : CcBuildServer); parts.Add("/project/"); parts.Add(buildProject); parts.Add("/build/"); parts.Add(file); parts.Add("/ViewBuildReport.aspx"); return string.Join(string.Empty, parts.ToArray()); } /// <summary> /// Find the first BuildProject where the Reference matches the result.ProjectName. /// </summary> /// <param name="result">The CC integration result.</param> /// <returns>The BuildProject if we have a match; otherwise, null.</returns> private BuildProject GetBuildProject(IIntegrationResult result) { BuildProjectFilter filter = new BuildProjectFilter(); filter.References.Add(result.ProjectName); filter.State.Add(State.Active); ICollection<BuildProject> projects = Instance.Get.BuildProjects(filter); foreach (BuildProject project in projects) { return project; } Trace("Couldn't find BuildProject for {0}", result.ProjectName); return null; } private static void Trace(string format, params object[] args) { System.Diagnostics.Trace.WriteLine(string.Format(format, args)); } } }
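// A short sketch of the reference-matching step used by DetermineWorkitems
// above, using only the .NET Regex class. The pattern and commit comment are
// hypothetical; in the publisher the pattern comes from the configured
// <referenceexpression> setting and each match.Value is passed to
// ResolveReference, which looks the identifier up via <referencefield>.
using System;
using System.Text.RegularExpressions;

class ReferenceExpressionDemo
{
    static void Main()
    {
        string referenceExpression = @"[A-Z]{1,2}-[0-9]+";   // assumed sample pattern
        string checkInComment = "Fixed login timeout, refs B-01001 and D-01002.";

        var expression = new Regex(referenceExpression);
        foreach (Match match in expression.Matches(checkInComment))
        {
            // DetermineWorkitems would call ResolveReference(match.Value) here.
            Console.WriteLine("Workitem reference found: " + match.Value);
        }
    }
}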
/* * UltraCart Rest API V2 * * UltraCart REST API Version 2 * * OpenAPI spec version: 2.0.0 * Contact: [email protected] * Generated by: https://github.com/swagger-api/swagger-codegen.git */ using System; using System.Linq; using System.IO; using System.Text; using System.Text.RegularExpressions; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Converters; using System.ComponentModel.DataAnnotations; using SwaggerDateConverter = com.ultracart.admin.v2.Client.SwaggerDateConverter; namespace com.ultracart.admin.v2.Model { /// <summary> /// PaymentsConfigurationSezzle /// </summary> [DataContract] public partial class PaymentsConfigurationSezzle : IEquatable<PaymentsConfigurationSezzle>, IValidatableObject { /// <summary> /// Sezzle environment /// </summary> /// <value>Sezzle environment</value> [JsonConverter(typeof(StringEnumConverter))] public enum EnvironmentEnum { /// <summary> /// Enum Live for value: Live /// </summary> [EnumMember(Value = "Live")] Live = 1, /// <summary> /// Enum Sandbox for value: Sandbox /// </summary> [EnumMember(Value = "Sandbox")] Sandbox = 2 } /// <summary> /// Sezzle environment /// </summary> /// <value>Sezzle environment</value> [DataMember(Name="environment", EmitDefaultValue=false)] public EnvironmentEnum? Environment { get; set; } /// <summary> /// Initializes a new instance of the <see cref="PaymentsConfigurationSezzle" /> class. /// </summary> /// <param name="acceptSezzle">Master flag for this merchant accepting Sezzle payments.</param> /// <param name="accountingCode">Optional Quickbooks code for this payment method.</param> /// <param name="businessId">Business ID.</param> /// <param name="depositToAccount">Optional Quickbooks Deposit to Account value.</param> /// <param name="environment">Sezzle environment.</param> /// <param name="privateApiKey">Private API key.</param> /// <param name="publicApiKey">Public API key.</param> /// <param name="restrictions">restrictions.</param> public PaymentsConfigurationSezzle(bool? acceptSezzle = default(bool?), string accountingCode = default(string), string businessId = default(string), string depositToAccount = default(string), EnvironmentEnum? environment = default(EnvironmentEnum?), string privateApiKey = default(string), string publicApiKey = default(string), PaymentsConfigurationRestrictions restrictions = default(PaymentsConfigurationRestrictions)) { this.AcceptSezzle = acceptSezzle; this.AccountingCode = accountingCode; this.BusinessId = businessId; this.DepositToAccount = depositToAccount; this.Environment = environment; this.PrivateApiKey = privateApiKey; this.PublicApiKey = publicApiKey; this.Restrictions = restrictions; } /// <summary> /// Master flag for this merchant accepting Sezzle payments /// </summary> /// <value>Master flag for this merchant accepting Sezzle payments</value> [DataMember(Name="accept_sezzle", EmitDefaultValue=false)] public bool? 
AcceptSezzle { get; set; } /// <summary> /// Optional Quickbooks code for this payment method /// </summary> /// <value>Optional Quickbooks code for this payment method</value> [DataMember(Name="accounting_code", EmitDefaultValue=false)] public string AccountingCode { get; set; } /// <summary> /// Business ID /// </summary> /// <value>Business ID</value> [DataMember(Name="business_id", EmitDefaultValue=false)] public string BusinessId { get; set; } /// <summary> /// Optional Quickbooks Deposit to Account value /// </summary> /// <value>Optional Quickbooks Deposit to Account value</value> [DataMember(Name="deposit_to_account", EmitDefaultValue=false)] public string DepositToAccount { get; set; } /// <summary> /// Private API key /// </summary> /// <value>Private API key</value> [DataMember(Name="private_api_key", EmitDefaultValue=false)] public string PrivateApiKey { get; set; } /// <summary> /// Public API key /// </summary> /// <value>Public API key</value> [DataMember(Name="public_api_key", EmitDefaultValue=false)] public string PublicApiKey { get; set; } /// <summary> /// Gets or Sets Restrictions /// </summary> [DataMember(Name="restrictions", EmitDefaultValue=false)] public PaymentsConfigurationRestrictions Restrictions { get; set; } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class PaymentsConfigurationSezzle {\n"); sb.Append(" AcceptSezzle: ").Append(AcceptSezzle).Append("\n"); sb.Append(" AccountingCode: ").Append(AccountingCode).Append("\n"); sb.Append(" BusinessId: ").Append(BusinessId).Append("\n"); sb.Append(" DepositToAccount: ").Append(DepositToAccount).Append("\n"); sb.Append(" Environment: ").Append(Environment).Append("\n"); sb.Append(" PrivateApiKey: ").Append(PrivateApiKey).Append("\n"); sb.Append(" PublicApiKey: ").Append(PublicApiKey).Append("\n"); sb.Append(" Restrictions: ").Append(Restrictions).Append("\n"); sb.Append("}\n"); return sb.ToString(); } /// <summary> /// Returns the JSON string presentation of the object /// </summary> /// <returns>JSON string presentation of the object</returns> public virtual string ToJson() { return JsonConvert.SerializeObject(this, Formatting.Indented); } /// <summary> /// Returns true if objects are equal /// </summary> /// <param name="input">Object to be compared</param> /// <returns>Boolean</returns> public override bool Equals(object input) { return this.Equals(input as PaymentsConfigurationSezzle); } /// <summary> /// Returns true if PaymentsConfigurationSezzle instances are equal /// </summary> /// <param name="input">Instance of PaymentsConfigurationSezzle to be compared</param> /// <returns>Boolean</returns> public bool Equals(PaymentsConfigurationSezzle input) { if (input == null) return false; return ( this.AcceptSezzle == input.AcceptSezzle || (this.AcceptSezzle != null && this.AcceptSezzle.Equals(input.AcceptSezzle)) ) && ( this.AccountingCode == input.AccountingCode || (this.AccountingCode != null && this.AccountingCode.Equals(input.AccountingCode)) ) && ( this.BusinessId == input.BusinessId || (this.BusinessId != null && this.BusinessId.Equals(input.BusinessId)) ) && ( this.DepositToAccount == input.DepositToAccount || (this.DepositToAccount != null && this.DepositToAccount.Equals(input.DepositToAccount)) ) && ( this.Environment == input.Environment || (this.Environment != null && this.Environment.Equals(input.Environment)) ) && ( this.PrivateApiKey == 
input.PrivateApiKey || (this.PrivateApiKey != null && this.PrivateApiKey.Equals(input.PrivateApiKey)) ) && ( this.PublicApiKey == input.PublicApiKey || (this.PublicApiKey != null && this.PublicApiKey.Equals(input.PublicApiKey)) ) && ( this.Restrictions == input.Restrictions || (this.Restrictions != null && this.Restrictions.Equals(input.Restrictions)) ); } /// <summary> /// Gets the hash code /// </summary> /// <returns>Hash code</returns> public override int GetHashCode() { unchecked // Overflow is fine, just wrap { int hashCode = 41; if (this.AcceptSezzle != null) hashCode = hashCode * 59 + this.AcceptSezzle.GetHashCode(); if (this.AccountingCode != null) hashCode = hashCode * 59 + this.AccountingCode.GetHashCode(); if (this.BusinessId != null) hashCode = hashCode * 59 + this.BusinessId.GetHashCode(); if (this.DepositToAccount != null) hashCode = hashCode * 59 + this.DepositToAccount.GetHashCode(); if (this.Environment != null) hashCode = hashCode * 59 + this.Environment.GetHashCode(); if (this.PrivateApiKey != null) hashCode = hashCode * 59 + this.PrivateApiKey.GetHashCode(); if (this.PublicApiKey != null) hashCode = hashCode * 59 + this.PublicApiKey.GetHashCode(); if (this.Restrictions != null) hashCode = hashCode * 59 + this.Restrictions.GetHashCode(); return hashCode; } } /// <summary> /// To validate all properties of the instance /// </summary> /// <param name="validationContext">Validation context</param> /// <returns>Validation Result</returns> IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext) { yield break; } } }
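// A minimal usage sketch for the generated model above, assuming the UltraCart
// SDK assembly (and its Newtonsoft.Json dependency) is referenced. All values
// are placeholders.
using System;
using com.ultracart.admin.v2.Model;

class PaymentsConfigurationSezzleDemo
{
    static void Main()
    {
        var sezzle = new PaymentsConfigurationSezzle(
            acceptSezzle: true,
            businessId: "example-business-id",
            environment: PaymentsConfigurationSezzle.EnvironmentEnum.Sandbox,
            privateApiKey: "example-private-key",
            publicApiKey: "example-public-key");

        Console.WriteLine(sezzle);          // readable property dump from ToString()
        Console.WriteLine(sezzle.ToJson()); // indented JSON via Newtonsoft.Json
    }
}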
namespace FakeItEasy.Specs { using System; using System.Diagnostics.CodeAnalysis; using FakeItEasy.Configuration; using FakeItEasy.Tests.TestHelpers; using FluentAssertions; using Xbehave; using Xunit; public static class ConfiguringPropertySetterSpecs { public interface IHaveInterestingProperties { int ReadWriteProperty { get; set; } int ReadOnlyProperty { get; } [SuppressMessage("Microsoft.Design", "CA1023:IndexersShouldNotBeMultidimensional", Justification = "Required for testing.")] bool this[string genus, string species] { get; set; } int this[string commonName] { get; set; } string this[int count] { get; } int MethodThatLooksLikeAPropertyGetter(); } public interface IHaveInterestingDerivedProperties : IHaveInterestingProperties { } public interface IHaveAReadWriteProperty { int ReadWriteProperty { get; set; } } public interface IAmbiguousDerived : IHaveInterestingProperties, IHaveAReadWriteProperty { } [Scenario] public static void ConfiguringNonConfigurableSetter( ClassWithInterestingProperties subject, Exception exception) { "Given a Fake with a property that can't be configured" .x(() => subject = A.Fake<ClassWithInterestingProperties>()); "When assignment of the property is configured" .x(() => exception = Record.Exception(() => A.CallToSet(() => subject.NonConfigurableProperty).DoesNothing())); "Then a fake configuration exception is thrown" .x(() => exception.Should().BeAnExceptionOfType<FakeConfigurationException>()); } [Scenario] public static void ConfiguringSetterForReadOnlyProperty( IHaveInterestingProperties subject, Exception exception) { "Given a Fake with a read-only property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "When assignment of the property is configured" .x(() => exception = Record.Exception(() => A.CallToSet(() => subject.ReadOnlyProperty).DoesNothing())); "Then an argument exception is thrown" .x(() => exception.Should().BeAnExceptionOfType<ArgumentException>()); "And the exception message indicates that the property is read-only" .x(() => exception.Message.Should().Be( $"The property {nameof(IHaveInterestingProperties.ReadOnlyProperty)} does not have a setter.")); } [Scenario] public static void ConfiguringSetterViaMethod( IHaveInterestingProperties subject, Exception exception) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "When assignment of the property is configured using a method call expression" .x(() => exception = Record.Exception(() => A.CallToSet(() => subject.MethodThatLooksLikeAPropertyGetter()).DoesNothing())); "Then an argument exception is thrown" .x(() => exception.Should().BeAnExceptionOfType<ArgumentException>()); "And the exception message indicates that the expression refers to an incorrect member type" .x(() => exception.Message.Should().EndWith("' must refer to a property or indexer getter, but doesn't.")); } [Scenario] public static void ConfiguringSetterViaField( ClassWithInterestingProperties subject, Exception exception) { "Given a Fake with a property" .x(() => subject = A.Fake<ClassWithInterestingProperties>()); "When assignment of the property is configured using a field access expression" .x(() => exception = Record.Exception(() => A.CallToSet(() => subject.Field).DoesNothing())); "Then an argument exception is thrown" .x(() => exception.Should().BeAnExceptionOfType<ArgumentException>()); "And the exception message indicates that the expression refers to an incorrect member type" .x(() => exception.Message.Should().Be("The specified expression is not a method call 
or property getter.")); } [Scenario] [Example(int.MinValue)] [Example(-42)] [Example(0)] [Example(42)] [Example(int.MaxValue)] public static void ConfiguringSetterForAnyValue( int value, IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the property is configured for any value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).Invokes(call => wasConfiguredBehaviorUsed = true)); $"When I assign the property to {value}" .x(() => subject.ReadWriteProperty = value); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] [Example(int.MinValue)] [Example(-42)] [Example(0)] [Example(42)] [Example(int.MaxValue)] public static void ConfiguringSetterToThrowForAnyValue( int value, IHaveInterestingProperties subject, Exception exception) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the property is configured to throw an exception for any value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).Throws(new InvalidOperationException("oops"))); $"When I assign the property to {value}" .x(() => exception = Record.Exception(() => subject.ReadWriteProperty = value)); "Then it throws the configured exception" .x(() => exception.Should().BeAnExceptionOfType<InvalidOperationException>().Which.Message.Should().Be("oops")); } [Scenario] [Example(int.MinValue)] [Example(-42)] [Example(0)] [Example(42)] [Example(int.MaxValue)] public static void ConfiguringSetterToDoNothingForAnyValue( int value, IHaveInterestingProperties subject) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the property is configured to do nothing for any value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).DoesNothing()); $"When I assign the property to {value}" .x(() => subject.ReadWriteProperty = value); "Then the default behavior is suppressed and the assigned value is not returned" .x(() => subject.ReadWriteProperty.Should().Be(0)); } [Scenario] [Example(int.MinValue)] [Example(-42)] [Example(0)] [Example(42)] [Example(int.MaxValue)] public static void ConfiguringSetterToCallBaseMethodForAnyValue( int value, ClassWithInterestingProperties subject) { "Given a Fake with a property" .x(() => subject = A.Fake<ClassWithInterestingProperties>()); "And assignment of the property is configured to call the base implementation for any value" .x(() => A.CallToSet(() => subject.ConfigurableProperty).CallsBaseMethod()); $"When I assign the property to {value}" .x(() => subject.ConfigurableProperty = value); "Then the base implementation is called" .x(() => subject.WasBaseSetterCalled.Should().BeTrue()); } [Scenario] public static void ConfigureIndexerWithWrongIndexes( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with an indexer" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the indexer is configured" .x(() => A.CallToSet(() => subject["Choeropsis", "liberiensis"]).Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property using the wrong indexes" .x(() => subject["Eleoniscus", "helenae"] = false); "Then the configured behavior is not used" .x(() => wasConfiguredBehaviorUsed.Should().BeFalse()); } [Scenario] public static void ConfigureIndexerWithMatchingIndexes( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake 
with an indexer" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the indexer is configured" .x(() => A.CallToSet(() => subject["Choeropsis", "liberiensis"]).Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property using matching indexes" .x(() => subject["Choeropsis", "liberiensis"] = false); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void ConfigureOverloadedIndexer( IHaveInterestingProperties subject, bool wasFirstConfiguredBehaviorUsed, bool wasSecondConfiguredBehaviorUsed) { "Given a Fake with an overloaded indexer" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the indexer is configured for one signature" .x(() => A.CallToSet(() => subject["Choeropsis", "liberiensis"]).Invokes(call => wasFirstConfiguredBehaviorUsed = true)); "And assignment of the indexer is configured for the other signature" .x(() => A.CallToSet(() => subject["Pygmy hippopotamus"]).Invokes(call => wasSecondConfiguredBehaviorUsed = true)); "When I assign the property using one signature" .x(() => subject["Choeropsis", "liberiensis"] = true); "And I assign the property using the other signature" .x(() => subject["Pygmy hippopotamus"] = 4); "Then the configured behavior is used for the first signature" .x(() => wasFirstConfiguredBehaviorUsed.Should().BeTrue()); "And the configured behavior is used for the second signature" .x(() => wasSecondConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void ConfigureReadOnlyIndexer( IHaveInterestingProperties subject, Exception exception) { "Given a Fake with a read-only indexer" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "When assignment of the indexer is configured" .x(() => exception = Record.Exception(() => A.CallToSet(() => subject[7]).DoesNothing())); "Then an argument exception is thrown" .x(() => exception.Should().BeAnExceptionOfType<ArgumentException>()); "And the exception message indicates that the property is read only" .x(() => exception.Message.Should().EndWith("refers to an indexed property that does not have a setter.")); } [Scenario] public static void OverrideIndexerConfigurationWithWhenArgumentsMatch( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with an indexer" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the indexer is configured using WhenArgumentsMatch" .x(() => A.CallToSet(() => subject["Choeropsis", "liberiensis"]) .WhenArgumentsMatch(arguments => arguments.Get<string>("genus") == "Canis") .Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property using indexes that satisfy the WhenArgumentsMatch" .x(() => subject["Canis", "lupus"] = false); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void OverridePropertyValueConfigurationWithWhenArgumentsMatchAndCallWithGoodValue( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the property is configured for value 3 using WhenArgumentsMatch" .x(() => A.CallToSet(() => subject.ReadWriteProperty) .WhenArgumentsMatch(arguments => arguments.Get<int>(0) == 3) .Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property value to 3" .x(() => subject.ReadWriteProperty = 3); "Then the configured behavior is not used" .x(() => 
wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void OverridePropertyValueConfigurationWithWhenArgumentsMatchAndCallWithBadValue( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the property is configured for value 3 using WhenArgumentsMatch" .x(() => A.CallToSet(() => subject.ReadWriteProperty) .WhenArgumentsMatch(arguments => arguments.Get<int>(0) == 3) .Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property value to 4" .x(() => subject.ReadWriteProperty = 4); "Then the configured behavior is not used" .x(() => wasConfiguredBehaviorUsed.Should().BeFalse()); } [Scenario] public static void OverrideIndexerConfigurationWithWithAnyArguments( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with an indexer" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the indexer is configured for specific arguments and overridden with WithAnyArguments" .x(() => A.CallToSet(() => subject["Choeropsis", "liberiensis"]) .WithAnyArguments() .Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property using the wrong indexes" .x(() => subject["Eleoniscus", "helenae"] = false); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void ConfiguringSetterWithExactValueAndAssigningThatValue( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the property is configured for a specific value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).To(5) .Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property to the matching value" .x(() => subject.ReadWriteProperty = 5); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void ConfiguringSetterWithExactValueAndAssigningDifferentValue( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the property is configured for a specific value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).To(5) .Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property to a different value" .x(() => subject.ReadWriteProperty = -13); "Then the configured behavior is not used" .x(() => wasConfiguredBehaviorUsed.Should().BeFalse()); } [Scenario] [Example(4, "not used")] [Example(5, "used")] public static void ConfiguringSetterWithValueSpecificationAndAssigningMatching( int actualValue, string fateOfConfiguredBehavior, IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given a Fake with a property" .x(() => subject = A.Fake<IHaveInterestingProperties>()); "And assignment of the property is configured for values greater than 4" .x(() => A.CallToSet(() => subject.ReadWriteProperty).To(() => A<int>.That.IsGreaterThan(4)) .Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign the property to the value {0}" .x(() => subject.ReadWriteProperty = actualValue); "Then the configured behavior is {1}" .x(() => wasConfiguredBehaviorUsed.Should().Be(fateOfConfiguredBehavior == "used")); } [Scenario] public static void ConfiguringSetterFromBaseInterfaceOnFakedInterface( 
IHaveInterestingDerivedProperties subject, bool wasConfiguredBehaviorUsed) { "Given an interface with a read/write property" .See<IHaveInterestingProperties>(); "And an interface that inherits it" .See<IHaveInterestingDerivedProperties>(); "And a Fake created from the derived interface" .x(() => subject = A.Fake<IHaveInterestingDerivedProperties>()); "And assignment of the property is configured for any value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign a value to the property" .x(() => subject.ReadWriteProperty = 0); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void ConfiguringSetterFromBaseInterfaceOnFakedInterfaceOnBaseInterfaceTypedVar( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given an interface with a read/write property" .See<IHaveInterestingProperties>(); "And an interface that inherits it" .See<IHaveInterestingDerivedProperties>(); "And a Fake created from the derived interface but assigned to a variable of the interface's type" .x(() => subject = A.Fake<IHaveInterestingDerivedProperties>()); "And assignment of the property is configured for any value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign a value to the property" .x(() => subject.ReadWriteProperty = 0); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void ConfiguringSetterFromBaseInterfaceOnFakedClass( ClassImplementingInterfaceWithInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given an interface with a read/write property" .See<IHaveInterestingProperties>(); "And a class that implements it" .See<ClassImplementingInterfaceWithInterestingProperties>(); "And a Fake created from the class" .x(() => subject = A.Fake<ClassImplementingInterfaceWithInterestingProperties>()); "And assignment of the property is configured for any value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign a value to the property" .x(() => subject.ReadWriteProperty = 0); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void ConfiguringSetterFromAmbiguousBaseInterface( IAmbiguousDerived subject, bool wasConfiguredBehaviorUsed) { "Given a interface that inherits from two interfaces that have a read/write property with the same name and signature" .See<IAmbiguousDerived>(); "And a fake created from the derived interface" .x(() => subject = A.Fake<IAmbiguousDerived>()); "And assignment of the property of the second base interface is configured for any value" .x(() => A.CallToSet(() => ((IHaveAReadWriteProperty)subject).ReadWriteProperty).Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign a value to the property" .x(() => ((IHaveAReadWriteProperty)subject).ReadWriteProperty = 0); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } [Scenario] public static void ConfiguringSetterFromBaseInterfaceOnFakedClassFromInterfaceTypedVar( IHaveInterestingProperties subject, bool wasConfiguredBehaviorUsed) { "Given an interface with a read/write property" .See<IHaveInterestingProperties>(); "And a class that implements it" .See<ClassImplementingInterfaceWithInterestingProperties>(); "And a Fake created from the class but 
assigned to a variable of the interface's type" .x(() => subject = A.Fake<ClassImplementingInterfaceWithInterestingProperties>()); "And assignment of the property is configured for any value" .x(() => A.CallToSet(() => subject.ReadWriteProperty).Invokes(call => wasConfiguredBehaviorUsed = true)); "When I assign a value to the property" .x(() => subject.ReadWriteProperty = 0); "Then the configured behavior is used" .x(() => wasConfiguredBehaviorUsed.Should().BeTrue()); } public class ClassWithInterestingProperties { [SuppressMessage("StyleCop.CSharp.MaintainabilityRules", "SA1401:FieldsMustBePrivate", Justification = "Required for testing.")] #pragma warning disable 649 internal int Field; #pragma warning restore 649 public int NonConfigurableProperty { get; set; } public virtual int ConfigurableProperty { get { return 0; } set { this.WasBaseSetterCalled = true; } } public bool WasBaseSetterCalled { get; private set; } } public class ClassImplementingInterfaceWithInterestingProperties : IHaveInterestingProperties { public virtual int ReadWriteProperty { get; set; } public virtual int ReadOnlyProperty { get; } = 7; public virtual bool this[string genus, string species] { get => false; set { } } public virtual int this[string commonName] { get => 3; set { } } public virtual string this[int count] => string.Empty; public virtual int MethodThatLooksLikeAPropertyGetter() => 0; } } }
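// A condensed sketch of the A.CallToSet API exercised by the specs above,
// assuming the FakeItEasy package is referenced. The interface is redeclared
// here only to keep the sketch self-contained.
using System;
using FakeItEasy;

public class CallToSetSketch
{
    public interface IHaveInterestingProperties
    {
        int ReadWriteProperty { get; set; }
    }

    static void Main()
    {
        var subject = A.Fake<IHaveInterestingProperties>();

        // Only assignments of exactly 5 trigger the configured behavior;
        // other values fall back to the fake's default setter behavior.
        A.CallToSet(() => subject.ReadWriteProperty).To(5)
            .Throws(new InvalidOperationException("oops"));

        subject.ReadWriteProperty = 3;      // no match, nothing thrown
        try
        {
            subject.ReadWriteProperty = 5;  // matches, throws the configured exception
        }
        catch (InvalidOperationException ex)
        {
            Console.WriteLine(ex.Message);  // "oops"
        }
    }
}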
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Diagnostics; using Internal.TypeSystem; using ILCompiler; using LLVMSharp; using ILCompiler.CodeGen; using ILCompiler.DependencyAnalysis; using ILCompiler.DependencyAnalysisFramework; namespace Internal.IL { // Implements an IL scanner that scans method bodies to be compiled by the code generation // backend before the actual compilation happens to gain insights into the code. partial class ILImporter { ArrayBuilder<object> _dependencies = new ArrayBuilder<object>(); public IEnumerable<object> GetDependencies() { return _dependencies.ToArray(); } public LLVMModuleRef Module { get; } private readonly MethodDesc _method; private readonly MethodIL _methodIL; private readonly MethodSignature _signature; private readonly TypeDesc _thisType; private readonly WebAssemblyCodegenCompilation _compilation; private LLVMValueRef _llvmFunction; private LLVMBasicBlockRef _curBasicBlock; private LLVMBuilderRef _builder; private readonly LocalVariableDefinition[] _locals; private readonly byte[] _ilBytes; /// <summary> /// Stack of values pushed onto the IL stack: locals, arguments, values, function pointer, ... /// </summary> private EvaluationStack<StackEntry> _stack = new EvaluationStack<StackEntry>(0); private class BasicBlock { // Common fields public enum ImportState : byte { Unmarked, IsPending } public BasicBlock Next; public int StartOffset; public ImportState State = ImportState.Unmarked; public EvaluationStack<StackEntry> EntryStack; public bool TryStart; public bool FilterStart; public bool HandlerStart; public LLVMBasicBlockRef Block; } private class ExceptionRegion { public ILExceptionRegion ILRegion; } private ExceptionRegion[] _exceptionRegions; public ILImporter(WebAssemblyCodegenCompilation compilation, MethodDesc method, MethodIL methodIL, string mangledName) { Module = compilation.Module; _compilation = compilation; _method = method; _methodIL = methodIL; _ilBytes = methodIL.GetILBytes(); _locals = methodIL.GetLocals(); _signature = method.Signature; _thisType = method.OwningType; var ilExceptionRegions = methodIL.GetExceptionRegions(); _exceptionRegions = new ExceptionRegion[ilExceptionRegions.Length]; for (int i = 0; i < ilExceptionRegions.Length; i++) { _exceptionRegions[i] = new ExceptionRegion() { ILRegion = ilExceptionRegions[i] }; } _llvmFunction = GetOrCreateLLVMFunction(mangledName); _builder = LLVM.CreateBuilder(); } public void Import() { FindBasicBlocks(); try { ImportBasicBlocks(); } catch { // Change the function body to trap foreach (BasicBlock block in _basicBlocks) { if (block != null && block.Block.Pointer != IntPtr.Zero) { LLVM.DeleteBasicBlock(block.Block); } } LLVMBasicBlockRef trapBlock = LLVM.AppendBasicBlock(_llvmFunction, "Trap"); LLVM.PositionBuilderAtEnd(_builder, trapBlock); EmitTrapCall(); LLVM.BuildRetVoid(_builder); throw; } } private void GenerateProlog() { int totalLocalSize = 0; foreach(LocalVariableDefinition local in _locals) { int localSize = local.Type.GetElementSize().AsInt; totalLocalSize += localSize; } var sp = LLVM.GetFirstParam(_llvmFunction); int paramOffset = GetTotalParameterOffset(); for (int i = 0; i < totalLocalSize; i++) { var stackOffset = LLVM.BuildGEP(_builder, sp, new LLVMValueRef[] { LLVM.ConstInt(LLVM.Int32Type(), (ulong)(paramOffset + i), LLVMMisc.False) }, 
String.Empty); LLVM.BuildStore(_builder, LLVM.ConstInt(LLVM.Int8Type(), 0, LLVMMisc.False), stackOffset); } } private LLVMValueRef CreateLLVMFunction(string mangledName) { LLVMTypeRef universalSignature = LLVM.FunctionType(LLVM.VoidType(), new LLVMTypeRef[] { LLVM.PointerType(LLVM.Int8Type(), 0), LLVM.PointerType(LLVM.Int8Type(), 0) }, false); return LLVM.AddFunction(Module, mangledName , universalSignature); } private LLVMValueRef GetOrCreateLLVMFunction(string mangledName) { LLVMValueRef llvmFunction = LLVM.GetNamedFunction(Module, mangledName); if(llvmFunction.Pointer == IntPtr.Zero) { return CreateLLVMFunction(mangledName); } return llvmFunction; } /// <summary> /// Push an expression named <paramref name="name"/> of kind <paramref name="kind"/>. /// </summary> /// <param name="kind">Kind of entry in stack</param> /// <param name="name">Variable to be pushed</param> /// <param name="type">Type if any of <paramref name="name"/></param> private void PushExpression(StackValueKind kind, string name, LLVMValueRef llvmValue, TypeDesc type = null) { Debug.Assert(kind != StackValueKind.Unknown, "Unknown stack kind"); switch (kind) { case StackValueKind.Int32: { if (!type.IsWellKnownType(WellKnownType.Int32) && !type.IsWellKnownType(WellKnownType.IntPtr) && !type.IsWellKnownType(WellKnownType.UInt32) && !type.IsWellKnownType(WellKnownType.UIntPtr)) { llvmValue = LLVM.BuildIntCast(_builder, llvmValue, LLVM.Int32Type(), ""); } } break; case StackValueKind.Int64: { if (!type.IsWellKnownType(WellKnownType.Int64) && !(type.IsWellKnownType(WellKnownType.UInt64))) { llvmValue = LLVM.BuildIntCast(_builder, llvmValue, LLVM.Int64Type(), ""); } } break; case StackValueKind.NativeInt: break; } _stack.Push(new ExpressionEntry(kind, name, llvmValue, type)); } /// <summary> /// Generate a cast in case the stack type of source is not identical or compatible with destination type. 
/// </summary> /// <param name="destType">Type of destination</param> /// <param name="srcEntry">Source entry from stack</param> private void AppendCastIfNecessary(TypeDesc destType, StackEntry srcEntry) { ConstantEntry constant = srcEntry as ConstantEntry; if ((constant != null) && (constant.IsCastNecessary(destType)) || !destType.IsValueType || destType != srcEntry.Type) { throw new NotImplementedException(); /* Append("("); Append(GetSignatureTypeNameAndAddReference(destType)); Append(")");*/ } } private void AppendCastIfNecessary(StackValueKind dstType, TypeDesc srcType) { if (dstType == StackValueKind.ByRef) { throw new NotImplementedException(); /* Append("("); Append(GetSignatureTypeNameAndAddReference(srcType)); Append(")");*/ } else if (srcType.IsPointer) { throw new NotImplementedException(); //Append("(intptr_t)"); } } private void MarkInstructionBoundary() { } private LLVMBasicBlockRef GetLLVMBasicBlockForBlock(BasicBlock block) { if (block.Block.Pointer == IntPtr.Zero) { block.Block = LLVM.AppendBasicBlock(_llvmFunction, "Block" + block.StartOffset); } return block.Block; } private void StartImportingBasicBlock(BasicBlock basicBlock) { _stack.Clear(); EvaluationStack<StackEntry> entryStack = basicBlock.EntryStack; if (entryStack != null) { int n = entryStack.Length; for (int i = 0; i < n; i++) { _stack.Push(entryStack[i].Duplicate()); } } bool isFirstBlock = false; if(_curBasicBlock.Equals(default(LLVMBasicBlockRef))) { isFirstBlock = true; } _curBasicBlock = GetLLVMBasicBlockForBlock(basicBlock); LLVM.PositionBuilderAtEnd(_builder, _curBasicBlock); if(isFirstBlock) { GenerateProlog(); } } private void EndImportingBasicBlock(BasicBlock basicBlock) { var terminator = basicBlock.Block.GetBasicBlockTerminator(); if (terminator.Pointer == IntPtr.Zero) { LLVM.BuildBr(_builder, GetLLVMBasicBlockForBlock(_basicBlocks[_currentOffset])); } } private void StartImportingInstruction() { } private void EndImportingInstruction() { } private void ImportNop() { } private void ImportBreak() { } private void ImportLoadVar(int index, bool argument) { int varBase; int varCountBase; int varOffset; LLVMTypeRef valueType; TypeDesc type; if (argument) { varCountBase = 0; varBase = 0; if (!_signature.IsStatic) { varCountBase = 1; } GetArgSizeAndOffsetAtIndex(index, out int argSize, out varOffset); if (!_signature.IsStatic && index == 0) { type = _thisType; if (type.IsValueType) { type = type.MakeByRefType(); } } else { type = _signature[index - varCountBase]; } valueType = GetLLVMTypeForTypeDesc(type); } else { varBase = GetTotalParameterOffset(); GetLocalSizeAndOffsetAtIndex(index, out int localSize, out varOffset); valueType = GetLLVMTypeForTypeDesc(_locals[index].Type); type = _locals[index].Type; } var loadLocation = LLVM.BuildGEP(_builder, LLVM.GetFirstParam(_llvmFunction), new LLVMValueRef[] { LLVM.ConstInt(LLVM.Int32Type(), (uint)(varBase + varOffset), LLVMMisc.False) }, String.Empty); var typedLoadLocation = LLVM.BuildPointerCast(_builder, loadLocation, LLVM.PointerType(valueType, 0), String.Empty); var loadResult = LLVM.BuildLoad(_builder, typedLoadLocation, "ld" + (argument ? 
"arg" : "loc") + index + "_"); PushExpression(GetStackValueKind(type), String.Empty, loadResult, type); } private StackValueKind GetStackValueKind(TypeDesc type) { switch (type.Category) { case TypeFlags.Boolean: case TypeFlags.Char: case TypeFlags.SByte: case TypeFlags.Byte: case TypeFlags.Int16: case TypeFlags.UInt16: case TypeFlags.Int32: case TypeFlags.UInt32: return StackValueKind.Int32; case TypeFlags.Int64: case TypeFlags.UInt64: return StackValueKind.Int64; case TypeFlags.Single: case TypeFlags.Double: return StackValueKind.Float; case TypeFlags.IntPtr: case TypeFlags.UIntPtr: return StackValueKind.NativeInt; case TypeFlags.ValueType: case TypeFlags.Nullable: return StackValueKind.ValueType; case TypeFlags.Enum: return GetStackValueKind(type.UnderlyingType); case TypeFlags.Class: case TypeFlags.Interface: case TypeFlags.Array: case TypeFlags.SzArray: return StackValueKind.ObjRef; case TypeFlags.ByRef: return StackValueKind.ByRef; case TypeFlags.Pointer: return StackValueKind.NativeInt; default: return StackValueKind.Unknown; } } private void ImportStoreVar(int index, bool argument) { if(argument) { throw new NotImplementedException("storing to argument"); } GetLocalSizeAndOffsetAtIndex(index, out int localSize, out int localOffset); LLVMValueRef toStore = _stack.Pop().LLVMValue; LLVMTypeRef valueType = GetLLVMTypeForTypeDesc(_locals[index].Type); ImportStoreHelper(toStore, valueType, LLVM.GetFirstParam(_llvmFunction), (uint)(GetTotalParameterOffset() + localOffset)); } private void ImportStoreHelper(LLVMValueRef toStore, LLVMTypeRef valueType, LLVMValueRef basePtr, uint offset) { LLVMTypeKind toStoreKind = LLVM.GetTypeKind(LLVM.TypeOf(toStore)); LLVMTypeKind valueTypeKind = LLVM.GetTypeKind(valueType); LLVMValueRef typedToStore = toStore; if(toStoreKind == LLVMTypeKind.LLVMPointerTypeKind && valueTypeKind == LLVMTypeKind.LLVMPointerTypeKind) { typedToStore = LLVM.BuildPointerCast(_builder, toStore, valueType, "storePtrCast"); } else if(toStoreKind == LLVMTypeKind.LLVMPointerTypeKind && valueTypeKind != LLVMTypeKind.LLVMPointerTypeKind) { typedToStore = LLVM.BuildPtrToInt(_builder, toStore, valueType, "storeIntCast"); } else if (toStoreKind != LLVMTypeKind.LLVMPointerTypeKind && valueTypeKind == LLVMTypeKind.LLVMPointerTypeKind) { typedToStore = LLVM.BuildIntToPtr(_builder, toStore, valueType, "storePtrCast"); } else { Debug.Assert(toStoreKind != LLVMTypeKind.LLVMPointerTypeKind && valueTypeKind != LLVMTypeKind.LLVMPointerTypeKind); typedToStore = LLVM.BuildIntCast(_builder, toStore, valueType, "storeIntCast"); } var storeLocation = LLVM.BuildGEP(_builder, basePtr, new LLVMValueRef[] { LLVM.ConstInt(LLVM.Int32Type(), offset, LLVMMisc.False) }, String.Empty); var typedStoreLocation = LLVM.BuildPointerCast(_builder, storeLocation, LLVM.PointerType(valueType, 0), String.Empty); LLVM.BuildStore(_builder, typedToStore, typedStoreLocation); } private LLVMTypeRef GetLLVMTypeForTypeDesc(TypeDesc type) { switch (type.Category) { case TypeFlags.Boolean: return LLVM.Int1Type(); case TypeFlags.SByte: case TypeFlags.Byte: return LLVM.Int8Type(); case TypeFlags.Int16: case TypeFlags.UInt16: case TypeFlags.Char: return LLVM.Int16Type(); case TypeFlags.Int32: case TypeFlags.UInt32: case TypeFlags.IntPtr: case TypeFlags.UIntPtr: return LLVM.Int32Type(); case TypeFlags.Class: case TypeFlags.Interface: return LLVM.PointerType(LLVM.Int8Type(), 0); case TypeFlags.Array: case TypeFlags.SzArray: case TypeFlags.ByRef: return LLVM.Int32Type(); case TypeFlags.Pointer: return 
LLVM.PointerType(GetLLVMTypeForTypeDesc(type.GetParameterType()), 0); case TypeFlags.Int64: case TypeFlags.UInt64: return LLVM.Int64Type(); case TypeFlags.Single: return LLVM.FloatType(); case TypeFlags.Double: return LLVM.DoubleType(); case TypeFlags.ValueType: case TypeFlags.Nullable: return LLVM.ArrayType(LLVM.Int8Type(), (uint)type.GetElementSize().AsInt); case TypeFlags.Enum: return GetLLVMTypeForTypeDesc(type.UnderlyingType); case TypeFlags.Void: return LLVM.VoidType(); default: throw new NotImplementedException(type.Category.ToString()); } } private int GetTotalLocalOffset() { int offset = 0; for (int i = 0; i < _locals.Length; i++) { offset += _locals[i].Type.GetElementSize().AsInt; } return offset; } private int GetTotalParameterOffset() { int offset = 0; for (int i = 0; i < _signature.Length; i++) { offset += _signature[i].GetElementSize().AsInt; } if (!_signature.IsStatic) { // If this is a struct, then it's a pointer on the stack if (_thisType.IsValueType) { offset += _thisType.Context.Target.PointerSize; } else { offset += _thisType.GetElementSize().AsInt; } } return offset; } private void GetArgSizeAndOffsetAtIndex(int index, out int size, out int offset) { int thisSize = 0; if (!_signature.IsStatic) { thisSize = _thisType.IsValueType ? _thisType.Context.Target.PointerSize : _thisType.GetElementSize().AsInt; if (index == 0) { size = thisSize; offset = 0; return; } else { index--; } } var argType = _signature[index]; size = argType.GetElementSize().AsInt; offset = thisSize; for (int i = 0; i < index; i++) { offset += _signature[i].GetElementSize().AsInt; } } private void GetLocalSizeAndOffsetAtIndex(int index, out int size, out int offset) { LocalVariableDefinition local = _locals[index]; size = local.Type.GetElementSize().AsInt; offset = 0; for (int i = 0; i < index; i++) { offset += _locals[i].Type.GetElementSize().AsInt; } } private void ImportAddressOfVar(int index, bool argument) { if (argument) { throw new NotImplementedException("ldarga"); } int localOffset = GetTotalParameterOffset(); GetLocalSizeAndOffsetAtIndex(index, out int size, out int offset); localOffset += offset; var localPtr = LLVM.BuildGEP(_builder, LLVM.GetFirstParam(_llvmFunction), new LLVMValueRef[] { LLVM.ConstInt(LLVM.Int32Type(), (uint)localOffset, LLVMMisc.False) }, "ldloca"); //var typedLocalPtr = LLVM.BuildPointerCast(_builder, localPtr, GetLLVMTypeForTypeDesc(_locals[index].Type.MakePointerType()), "ldloca"); _stack.Push(new ExpressionEntry(StackValueKind.NativeInt, "ldloca", localPtr, _locals[index].Type.MakePointerType())); } private void ImportDup() { _stack.Push(_stack.Peek().Duplicate()); } private void ImportPop() { _stack.Pop(); } private void ImportJmp(int token) { } private void ImportCasting(ILOpcode opcode, int token) { } private void ImportLoadNull() { _stack.Push(new ExpressionEntry(StackValueKind.ObjRef, "null", LLVM.ConstInt(LLVM.Int32Type(), 0, LLVMMisc.False))); } private void ImportReturn() { if (_signature.ReturnType != GetWellKnownType(WellKnownType.Void)) { StackEntry retVal = _stack.Pop(); LLVMTypeRef valueType = GetLLVMTypeForTypeDesc(_signature.ReturnType); ImportStoreHelper(retVal.LLVMValue, valueType, LLVM.GetNextParam(LLVM.GetFirstParam(_llvmFunction)), 0); } LLVM.BuildRetVoid(_builder); } private void ImportCall(ILOpcode opcode, int token) { MethodDesc callee = (MethodDesc)_methodIL.GetObject(token); if (callee.IsIntrinsic) { if (ImportIntrinsicCall(callee)) { return; } } if (callee.IsPInvoke) { ImportRawPInvoke(callee); return; } // we don't really have virtual call 
support, but we'll treat it as direct for now if (opcode != ILOpcode.call && opcode != ILOpcode.callvirt) { throw new NotImplementedException(); } if (opcode == ILOpcode.callvirt && callee.IsAbstract) { throw new NotImplementedException(); } HandleCall(callee); } /// <summary> /// Implements intrinsic methods instread of calling them /// </summary> /// <returns>True if the method was implemented</returns> private bool ImportIntrinsicCall(MethodDesc method) { Debug.Assert(method.IsIntrinsic); if (!(method.OwningType is MetadataType metadataType)) { return false; } switch (method.Name) { // Workaround for not being able to build a WASM version of CoreLib. This method // would return the x64 size, which is too large for WASM case "get_OffsetToStringData": if (metadataType.Name == "RuntimeHelpers" && metadataType.Namespace == "System.Runtime.CompilerServices") { _stack.Push(new Int32ConstantEntry(8, _method.Context.GetWellKnownType(WellKnownType.Int32))); return true; } break; } return false; } private void HandleCall(MethodDesc callee) { AddMethodReference(callee); string calleeName = _compilation.NameMangler.GetMangledMethodName(callee).ToString(); LLVMValueRef fn = GetOrCreateLLVMFunction(calleeName); int offset = GetTotalParameterOffset() + GetTotalLocalOffset() + callee.Signature.ReturnType.GetElementSize().AsInt; LLVMValueRef shadowStack = LLVM.BuildGEP(_builder, LLVM.GetFirstParam(_llvmFunction), new LLVMValueRef[] { LLVM.ConstInt(LLVM.Int32Type(), (uint)offset, LLVMMisc.False) }, String.Empty); var castShadowStack = LLVM.BuildPointerCast(_builder, shadowStack, LLVM.PointerType(LLVM.Int8Type(), 0), String.Empty); int returnOffset = GetTotalParameterOffset() + GetTotalLocalOffset(); var returnAddress = LLVM.BuildGEP(_builder, LLVM.GetFirstParam(_llvmFunction), new LLVMValueRef[] { LLVM.ConstInt(LLVM.Int32Type(), (uint)returnOffset, LLVMMisc.False) }, String.Empty); var castReturnAddress = LLVM.BuildPointerCast(_builder, returnAddress, LLVM.PointerType(LLVM.Int8Type(), 0), String.Empty); // argument offset uint argOffset = 0; int instanceAdjustment = 0; if (!callee.Signature.IsStatic) { instanceAdjustment = 1; } // The last argument is the top of the stack. 
We need to reverse them and store starting at the first argument LLVMValueRef[] argumentValues = new LLVMValueRef[callee.Signature.Length + instanceAdjustment]; for(int i = 0; i < argumentValues.Length; i++) { argumentValues[argumentValues.Length - i - 1] = _stack.Pop().LLVMValue; } for (int index = 0; index < argumentValues.Length; index++) { LLVMValueRef toStore = argumentValues[index]; TypeDesc argType; if (index == 0 && !callee.Signature.IsStatic) { argType = callee.OwningType; } else { argType = callee.Signature[index - instanceAdjustment]; } LLVMTypeRef valueType = GetLLVMTypeForTypeDesc(argType); ImportStoreHelper(toStore, valueType, castShadowStack, argOffset); argOffset += (uint)argType.GetElementSize().AsInt; } LLVM.BuildCall(_builder, fn, new LLVMValueRef[] { castShadowStack, castReturnAddress}, string.Empty); if (!callee.Signature.ReturnType.IsVoid) { LLVMTypeRef returnLLVMType = GetLLVMTypeForTypeDesc(callee.Signature.ReturnType); LLVMValueRef returnLLVMPointer = LLVM.BuildPointerCast(_builder, returnAddress, LLVM.PointerType(returnLLVMType, 0), String.Empty); LLVMValueRef loadResult = LLVM.BuildLoad(_builder, returnLLVMPointer, String.Empty); PushExpression(GetStackValueKind(callee.Signature.ReturnType), String.Empty, loadResult, callee.Signature.ReturnType); } } private void AddMethodReference(MethodDesc method) { _dependencies.Add(_compilation.NodeFactory.MethodEntrypoint(method)); } private void ImportRawPInvoke(MethodDesc method) { LLVMValueRef nativeFunc = LLVM.GetNamedFunction(Module, method.Name); // Create an import if we haven't already if (nativeFunc.Pointer == IntPtr.Zero) { // Set up native parameter types LLVMTypeRef[] paramTypes = new LLVMTypeRef[method.Signature.Length]; for (int i = 0; i < paramTypes.Length; i++) { paramTypes[i] = GetLLVMTypeForTypeDesc(method.Signature[i]); } // Define the full signature LLVMTypeRef nativeFuncType = LLVM.FunctionType(GetLLVMTypeForTypeDesc(method.Signature.ReturnType), paramTypes, LLVMMisc.False); nativeFunc = LLVM.AddFunction(Module, method.Name, nativeFuncType); LLVM.SetLinkage(nativeFunc, LLVMLinkage.LLVMDLLImportLinkage); } LLVMValueRef[] arguments = new LLVMValueRef[method.Signature.Length]; for(int i = 0; i < arguments.Length; i++) { LLVMValueRef argValue = _stack.Pop().LLVMValue; // Arguments are reversed on the stack // Coerce pointers to the native type TypeDesc signatureType = method.Signature[arguments.Length - i - 1]; LLVMValueRef typedValue = argValue; if (signatureType.IsPointer) { LLVMTypeRef signatureLlvmType = GetLLVMTypeForTypeDesc(signatureType); typedValue = LLVM.BuildPointerCast(_builder, argValue, signatureLlvmType, String.Empty); } arguments[arguments.Length - i - 1] = typedValue; } var returnValue = LLVM.BuildCall(_builder, nativeFunc, arguments, "call"); // TODO: void returns PushExpression(GetStackValueKind(method.Signature.ReturnType), String.Empty, returnValue, method.Signature.ReturnType); } private void ImportCalli(int token) { } private void ImportLdFtn(int token, ILOpcode opCode) { } private void ImportLoadInt(long value, StackValueKind kind) { switch (kind) { case StackValueKind.Int32: case StackValueKind.NativeInt: _stack.Push(new Int32ConstantEntry((int)value, _method.Context.GetWellKnownType(WellKnownType.Int32))); break; case StackValueKind.Int64: _stack.Push(new Int64ConstantEntry(value, _method.Context.GetWellKnownType(WellKnownType.Int64))); break; default: throw new InvalidOperationException(kind.ToString()); } } private void ImportLoadFloat(double value) { _stack.Push(new 
FloatConstantEntry(value, _method.Context.GetWellKnownType(WellKnownType.Double))); } private void ImportBranch(ILOpcode opcode, BasicBlock target, BasicBlock fallthrough) { if (opcode == ILOpcode.br) { LLVM.BuildBr(_builder, GetLLVMBasicBlockForBlock(target)); } else { LLVMValueRef condition; if (opcode == ILOpcode.brfalse || opcode == ILOpcode.brtrue) { var op = _stack.Pop(); LLVMValueRef value = op.LLVMValue; if (LLVM.GetTypeKind(LLVM.TypeOf(value)) == LLVMTypeKind.LLVMPointerTypeKind) { value = LLVM.BuildPtrToInt(_builder, value, LLVM.Int32Type(), String.Empty); } if (opcode == ILOpcode.brfalse) { condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntEQ, value, LLVM.ConstInt(LLVM.TypeOf(value), 0, LLVMMisc.False), "brfalse"); } else { condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntNE, value, LLVM.ConstInt(LLVM.TypeOf(value), 0, LLVMMisc.False), "brtrue"); } } else { var op1 = _stack.Pop(); var op2 = _stack.Pop(); // StackValueKind is carefully ordered to make this work (assuming the IL is valid) StackValueKind kind; if (op1.Kind > op2.Kind) { kind = op1.Kind; } else { kind = op2.Kind; } switch (opcode) { case ILOpcode.beq: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntEQ, op1.LLVMValue, op2.LLVMValue, "beq"); break; case ILOpcode.bge: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntSGE, op1.LLVMValue, op2.LLVMValue, "bge"); break; case ILOpcode.bgt: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntSGT, op1.LLVMValue, op2.LLVMValue, "bgt"); break; case ILOpcode.ble: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntSLE, op1.LLVMValue, op2.LLVMValue, "ble"); break; case ILOpcode.blt: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntSLT, op1.LLVMValue, op2.LLVMValue, "blt"); break; case ILOpcode.bne_un: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntNE, op1.LLVMValue, op2.LLVMValue, "bne_un"); break; case ILOpcode.bge_un: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntUGE, op1.LLVMValue, op2.LLVMValue, "bge_un"); break; case ILOpcode.bgt_un: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntUGT, op1.LLVMValue, op2.LLVMValue, "bgt_un"); break; case ILOpcode.ble_un: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntULE, op1.LLVMValue, op2.LLVMValue, "ble_un"); break; case ILOpcode.blt_un: condition = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntULT, op1.LLVMValue, op2.LLVMValue, "blt_un"); break; default: throw new NotSupportedException(); // unreachable } } LLVM.BuildCondBr(_builder, condition, GetLLVMBasicBlockForBlock(target), GetLLVMBasicBlockForBlock(fallthrough)); ImportFallthrough(fallthrough); } ImportFallthrough(target); } private void ImportSwitchJump(int jmpBase, int[] jmpDelta, BasicBlock fallthrough) { } private void ImportLoadIndirect(int token) { ImportLoadIndirect(ResolveTypeToken(token)); } private void ImportLoadIndirect(TypeDesc type) { StackEntry pointer = _stack.Pop(); LLVMTypeRef loadType = GetLLVMTypeForTypeDesc(type); LLVMTypeRef pointerType = LLVM.PointerType(loadType, 0); LLVMValueRef typedPointer; if (LLVM.GetTypeKind(LLVM.TypeOf(pointer.LLVMValue)) != LLVMTypeKind.LLVMPointerTypeKind) { typedPointer = LLVM.BuildIntToPtr(_builder, pointer.LLVMValue, pointerType, "ldindintptrcast"); } else { typedPointer = LLVM.BuildPointerCast(_builder, pointer.LLVMValue, pointerType, "ldindptrcast"); } LLVMValueRef load = LLVM.BuildLoad(_builder, typedPointer, "ldind"); PushExpression(GetStackValueKind(type), "ldlind", load, type); } private 
void ImportStoreIndirect(int token) { ImportStoreIndirect(ResolveTypeToken(token)); } private void ImportStoreIndirect(TypeDesc type) { StackEntry value = _stack.Pop(); StackEntry destinationPointer = _stack.Pop(); LLVMTypeRef requestedPointerType = LLVM.PointerType(GetLLVMTypeForTypeDesc(type), 0); LLVMValueRef typedValue = value.LLVMValue; LLVMValueRef typedPointer = destinationPointer.LLVMValue; if (LLVM.GetTypeKind(LLVM.TypeOf(destinationPointer.LLVMValue)) != LLVMTypeKind.LLVMPointerTypeKind) { typedPointer = LLVM.BuildIntToPtr(_builder, destinationPointer.LLVMValue, requestedPointerType, "stindintptrcast"); } else { typedPointer = LLVM.BuildPointerCast(_builder, destinationPointer.LLVMValue, requestedPointerType, "stindptrcast"); } if (value.Type != type) { if (LLVM.GetTypeKind(GetLLVMTypeForTypeDesc(value.Type)) != LLVMTypeKind.LLVMPointerTypeKind) { typedValue = LLVM.BuildIntCast(_builder, typedValue, GetLLVMTypeForTypeDesc(type), "stindvalcast"); } else { typedValue = LLVM.BuildPointerCast(_builder, typedValue, GetLLVMTypeForTypeDesc(type), "stindvalptrcast"); } } LLVM.BuildStore(_builder, typedValue, typedPointer); } private void ImportBinaryOperation(ILOpcode opcode) { StackEntry op1 = _stack.Pop(); StackEntry op2 = _stack.Pop(); // StackValueKind is carefully ordered to make this work (assuming the IL is valid) StackValueKind kind; TypeDesc type; if (op1.Kind > op2.Kind) { kind = op1.Kind; type = op1.Type; } else { kind = op2.Kind; type = op2.Type; } // The one exception from the above rule if ((kind == StackValueKind.ByRef) && (opcode == ILOpcode.sub || opcode == ILOpcode.sub_ovf || opcode == ILOpcode.sub_ovf_un)) { kind = StackValueKind.NativeInt; type = null; } LLVMValueRef result; switch (opcode) { case ILOpcode.add: // TODO: converting these to ints should also happen for sub and some other operations LLVMValueRef left = op1.LLVMValue; LLVMValueRef right = op2.LLVMValue; if (kind == StackValueKind.NativeInt || kind == StackValueKind.ObjRef || kind == StackValueKind.ByRef) { if(LLVM.GetTypeKind(LLVM.TypeOf(left)) == LLVMTypeKind.LLVMPointerTypeKind) { left = LLVM.BuildPtrToInt(_builder, left, LLVM.Int32Type(), "lptrasint"); } if (LLVM.GetTypeKind(LLVM.TypeOf(right)) == LLVMTypeKind.LLVMPointerTypeKind) { right = LLVM.BuildPtrToInt(_builder, right, LLVM.Int32Type(), "rptrasint"); } } result = LLVM.BuildAdd(_builder, left, right, "add"); break; case ILOpcode.sub: result = LLVM.BuildSub(_builder, op1.LLVMValue, op2.LLVMValue, "sub"); break; case ILOpcode.mul: result = LLVM.BuildMul(_builder, op1.LLVMValue, op2.LLVMValue, "mul"); break; case ILOpcode.div: result = LLVM.BuildSDiv(_builder, op1.LLVMValue, op2.LLVMValue, "sdiv"); break; case ILOpcode.div_un: result = LLVM.BuildUDiv(_builder, op1.LLVMValue, op2.LLVMValue, "udiv"); break; case ILOpcode.rem: result = LLVM.BuildSRem(_builder, op1.LLVMValue, op2.LLVMValue, "srem"); break; case ILOpcode.rem_un: result = LLVM.BuildURem(_builder, op1.LLVMValue, op2.LLVMValue, "urem"); break; case ILOpcode.and: result = LLVM.BuildAnd(_builder, op1.LLVMValue, op2.LLVMValue, "and"); break; case ILOpcode.or: result = LLVM.BuildOr(_builder, op1.LLVMValue, op2.LLVMValue, "or"); break; case ILOpcode.xor: result = LLVM.BuildXor(_builder, op1.LLVMValue, op2.LLVMValue, "xor"); break; // TODO: Overflow checks case ILOpcode.add_ovf: case ILOpcode.add_ovf_un: result = LLVM.BuildAdd(_builder, op1.LLVMValue, op2.LLVMValue, "add"); break; case ILOpcode.sub_ovf: case ILOpcode.sub_ovf_un: result = LLVM.BuildSub(_builder, op1.LLVMValue, op2.LLVMValue, 
"sub"); break; case ILOpcode.mul_ovf: case ILOpcode.mul_ovf_un: result = LLVM.BuildMul(_builder, op1.LLVMValue, op2.LLVMValue, "mul"); break; default: throw new InvalidOperationException(); // Should be unreachable } PushExpression(kind, "", result, type); } private void ImportShiftOperation(ILOpcode opcode) { LLVMValueRef result; StackEntry numBitsToShift = _stack.Pop(); StackEntry valueToShift = _stack.Pop(); switch (opcode) { case ILOpcode.shl: result = LLVM.BuildShl(_builder, valueToShift.LLVMValue, numBitsToShift.LLVMValue, "shl"); break; case ILOpcode.shr: result = LLVM.BuildAShr(_builder, valueToShift.LLVMValue, numBitsToShift.LLVMValue, "shr"); break; case ILOpcode.shr_un: result = LLVM.BuildLShr(_builder, valueToShift.LLVMValue, numBitsToShift.LLVMValue, "shr"); break; default: throw new InvalidOperationException(); // Should be unreachable } PushExpression(valueToShift.Kind, "", result, valueToShift.Type); } private void ImportCompareOperation(ILOpcode opcode) { var op1 = _stack.Pop(); var op2 = _stack.Pop(); // StackValueKind is carefully ordered to make this work (assuming the IL is valid) StackValueKind kind; if (op1.Kind > op2.Kind) { kind = op1.Kind; } else { kind = op2.Kind; } LLVMValueRef result; switch (opcode) { case ILOpcode.ceq: result = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntEQ, op2.LLVMValue, op1.LLVMValue, "ceq"); break; case ILOpcode.cgt: result = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntSGT, op2.LLVMValue, op1.LLVMValue, "cgt"); break; case ILOpcode.clt: result = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntSLT, op2.LLVMValue, op1.LLVMValue, "clt"); break; case ILOpcode.cgt_un: result = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntUGT, op2.LLVMValue, op1.LLVMValue, "cgt_un"); break; case ILOpcode.clt_un: result = LLVM.BuildICmp(_builder, LLVMIntPredicate.LLVMIntULT, op2.LLVMValue, op1.LLVMValue, "clt_un"); break; default: throw new NotSupportedException(); // unreachable } PushExpression(kind, "", result, GetWellKnownType(WellKnownType.SByte)); } private void ImportConvert(WellKnownType wellKnownType, bool checkOverflow, bool unsigned) { StackEntry value = _stack.Pop(); StackEntry convertedValue = value.Duplicate(); //conv.u for a pointer should change to a int8* if(wellKnownType == WellKnownType.UIntPtr) { if (value.Kind == StackValueKind.Int32) { convertedValue.LLVMValue = LLVM.BuildIntToPtr(_builder, value.LLVMValue, LLVM.PointerType(LLVM.Int8Type(), 0), "conv.u"); } } _stack.Push(convertedValue); } private void ImportUnaryOperation(ILOpcode opCode) { var argument = _stack.Pop(); LLVMValueRef result; switch (opCode) { case ILOpcode.neg: if (argument.Kind == StackValueKind.Float) { result = LLVM.BuildFNeg(_builder, argument.LLVMValue, "neg"); } else { result = LLVM.BuildNeg(_builder, argument.LLVMValue, "neg"); } break; case ILOpcode.not: result = LLVM.BuildNot(_builder, argument.LLVMValue, "not"); break; default: throw new NotSupportedException(); // unreachable } PushExpression(argument.Kind, "", result, argument.Type); } private void ImportCpOpj(int token) { } private void ImportUnbox(int token, ILOpcode opCode) { } private void ImportRefAnyVal(int token) { } private void ImportCkFinite() { } private void ImportMkRefAny(int token) { } private void ImportLdToken(int token) { var ldtokenValue = _methodIL.GetObject(token); WellKnownType ldtokenKind; string name; StackEntry value; if (ldtokenValue is TypeDesc) { ldtokenKind = WellKnownType.RuntimeTypeHandle; //AddTypeReference((TypeDesc)ldtokenValue, false); // todo: this doesn't work 
because we don't have the eetypeptr pushed. How do we get the eetypeptr? MethodDesc helper = _compilation.TypeSystemContext.GetHelperEntryPoint("LdTokenHelpers", "GetRuntimeTypeHandle"); //AddMethodReference(helper); HandleCall(helper); name = ldtokenValue.ToString(); //value = new LdTokenEntry<TypeDesc>(StackValueKind.ValueType, name, (TypeDesc)ldtokenValue, GetWellKnownType(ldtokenKind)); } else if (ldtokenValue is FieldDesc) { ldtokenKind = WellKnownType.RuntimeFieldHandle; // todo: this is probably the wrong llvm value for the field value = new LdTokenEntry<FieldDesc>(StackValueKind.ValueType, null, (FieldDesc)ldtokenValue, LLVM.ConstInt(LLVM.Int32Type(), (uint)token, LLVMMisc.False), GetWellKnownType(ldtokenKind)); _stack.Push(value); } else if (ldtokenValue is MethodDesc) { throw new NotImplementedException(); } else { throw new InvalidOperationException(); } } private void ImportLocalAlloc() { } private void ImportEndFilter() { } private void ImportCpBlk() { } private void ImportInitBlk() { } private void ImportRethrow() { } private void ImportSizeOf(int token) { } private void ImportRefAnyType() { } private void ImportArgList() { } private void ImportUnalignedPrefix(byte alignment) { } private void ImportVolatilePrefix() { } private void ImportTailPrefix() { } private void ImportConstrainedPrefix(int token) { } private void ImportNoPrefix(byte mask) { } private void ImportReadOnlyPrefix() { } private void ImportThrow() { var exceptionObject = _stack.Pop(); EmitTrapCall(); } private void ImportLoadField(int token, bool isStatic) { if (isStatic) { throw new NotImplementedException("static ldfld"); } FieldDesc field = (FieldDesc)_methodIL.GetObject(token); StackEntry objectEntry = _stack.Pop(); var untypedObjectPointer = LLVM.BuildPointerCast(_builder, objectEntry.LLVMValue, LLVM.PointerType(LLVMTypeRef.Int8Type(), 0), String.Empty); var loadLocation = LLVM.BuildGEP(_builder, untypedObjectPointer, new LLVMValueRef[] { LLVM.ConstInt(LLVM.Int32Type(), (ulong)field.Offset.AsInt, LLVMMisc.False) }, String.Empty); var typedLoadLocation = LLVM.BuildPointerCast(_builder, loadLocation, LLVM.PointerType(GetLLVMTypeForTypeDesc(field.FieldType), 0), String.Empty); LLVMValueRef loadValue = LLVM.BuildLoad(_builder, typedLoadLocation, "ldfld_" + field.Name); PushExpression(GetStackValueKind(field.FieldType), "ldfld", loadValue, field.FieldType); } private void ImportAddressOfField(int token, bool isStatic) { } private void ImportStoreField(int token, bool isStatic) { if(isStatic) { throw new NotImplementedException("static stfld"); } FieldDesc field = (FieldDesc)_methodIL.GetObject(token); StackEntry valueEntry = _stack.Pop(); StackEntry objectEntry = _stack.Pop(); LLVMValueRef value = valueEntry.LLVMValue; // All integers are int32 on the stack, but need to be resized to fit fields if(valueEntry.Kind == StackValueKind.Int32) { value = LLVM.BuildIntCast(_builder, value, GetLLVMTypeForTypeDesc(field.FieldType), "intfieldcast"); } var untypedObjectPointer = LLVM.BuildPointerCast(_builder, objectEntry.LLVMValue, LLVM.PointerType(LLVMTypeRef.Int8Type(), 0), "stfld"); var storeLocation = LLVM.BuildGEP(_builder, untypedObjectPointer, new LLVMValueRef[] { LLVM.ConstInt(LLVM.Int32Type(), (ulong)field.Offset.AsInt, LLVMMisc.False) }, "stfld"); var typedStoreLocation = LLVM.BuildPointerCast(_builder, storeLocation, LLVM.PointerType(GetLLVMTypeForTypeDesc(field.FieldType), 0), "stfld"); LLVM.BuildStore(_builder, value, typedStoreLocation); } // Loads symbol address. 
Address is represented as a i32* private LLVMValueRef LoadAddressOfSymbolNode(ISymbolNode node) { LLVMValueRef addressOfAddress = WebAssemblyObjectWriter.GetSymbolValuePointer(Module, node, _compilation.NameMangler, false); //return addressOfAddress; return LLVM.BuildLoad(_builder, addressOfAddress, "LoadAddressOfSymbolNode"); } private void ImportLoadString(int token) { TypeDesc stringType = this._compilation.TypeSystemContext.GetWellKnownType(WellKnownType.String); string str = (string)_methodIL.GetObject(token); ISymbolNode node = _compilation.NodeFactory.SerializedStringObject(str); LLVMValueRef stringDataPointer = LoadAddressOfSymbolNode(node); _dependencies.Add(node); _stack.Push(new ExpressionEntry(GetStackValueKind(stringType), String.Empty, stringDataPointer, stringType)); } private void ImportInitObj(int token) { } private void ImportBox(int token) { } private void ImportLeave(BasicBlock target) { for (int i = 0; i < _exceptionRegions.Length; i++) { var r = _exceptionRegions[i]; if (r.ILRegion.Kind == ILExceptionRegionKind.Finally && IsOffsetContained(_currentOffset - 1, r.ILRegion.TryOffset, r.ILRegion.TryLength) && !IsOffsetContained(target.StartOffset, r.ILRegion.TryOffset, r.ILRegion.TryLength)) { MarkBasicBlock(_basicBlocks[r.ILRegion.HandlerOffset]); } } MarkBasicBlock(target); LLVM.BuildBr(_builder, GetLLVMBasicBlockForBlock(target)); } private static bool IsOffsetContained(int offset, int start, int length) { return start <= offset && offset < start + length; } private void ImportNewArray(int token) { } private void ImportLoadElement(int token) { } private void ImportLoadElement(TypeDesc elementType) { } private void ImportStoreElement(int token) { } private void ImportStoreElement(TypeDesc elementType) { } private void ImportLoadLength() { } private void ImportAddressOfElement(int token) { } private void ImportEndFinally() { } private void ImportFallthrough(BasicBlock next) { EvaluationStack<StackEntry> entryStack = next.EntryStack; if (entryStack != null) { if (entryStack.Length != _stack.Length) throw new InvalidProgramException(); for (int i = 0; i < entryStack.Length; i++) { // TODO: Do we need to allow conversions? if (entryStack[i].Kind != _stack[i].Kind) throw new InvalidProgramException(); if (entryStack[i].Kind == StackValueKind.ValueType) { if (entryStack[i].Type != _stack[i].Type) throw new InvalidProgramException(); } } } else { if (_stack.Length > 0) { entryStack = new EvaluationStack<StackEntry>(_stack.Length); #pragma warning disable 162 // Due to not implement3ed exception incrementer in for needs pragma warning disable for (int i = 0; i < _stack.Length; i++) { // todo: do we need anything special for spilled stacks like cpp codegen does? entryStack.Push(_stack[i]); //entryStack.Push(NewSpillSlot(_stack[i])); } #pragma warning restore 162 } next.EntryStack = entryStack; } if (entryStack != null) { // todo: do we have to do anything here? 
#pragma warning disable 162// Due to not implement3ed exception incrementer in for needs pragma warning disable for (int i = 0; i < entryStack.Length; i++) { /*AppendLine(); Append(entryStack[i]); Append(" = "); Append(_stack[i]); AppendSemicolon();*/ } #pragma warning restore 162 } MarkBasicBlock(next); } private TypeDesc ResolveTypeToken(int token) { return (TypeDesc)_methodIL.GetObject(token); } private TypeDesc GetWellKnownType(WellKnownType wellKnownType) { return _compilation.TypeSystemContext.GetWellKnownType(wellKnownType); } private void ReportInvalidBranchTarget(int targetOffset) { ThrowHelper.ThrowInvalidProgramException(); } private void ReportFallthroughAtEndOfMethod() { ThrowHelper.ThrowInvalidProgramException(); } private void ReportInvalidInstruction(ILOpcode opcode) { ThrowHelper.ThrowInvalidProgramException(); } private void EmitTrapCall() { if (TrapFunction.Pointer == IntPtr.Zero) { TrapFunction = LLVM.AddFunction(Module, "llvm.trap", LLVM.FunctionType(LLVM.VoidType(), Array.Empty<LLVMTypeRef>(), false)); } LLVM.BuildCall(_builder, TrapFunction, Array.Empty<LLVMValueRef>(), string.Empty); } public override string ToString() { return _method.ToString(); } } }
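// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original sources): the importer above
// lowers every managed method to the "universal" LLVM signature
// void(i8* shadowStack, i8* returnSlot) and addresses arguments, locals and
// outgoing call arguments as byte offsets from the shadow stack pointer,
// mirroring the arithmetic in GetTotalParameterOffset, GetArgSizeAndOffsetAtIndex
// and HandleCall. The stand-alone helper below reproduces that layout for a
// hypothetical method so the offsets can be inspected without the type system;
// the element sizes are assumed inputs here, not values computed by the real
// compiler, and the class name is hypothetical.
// ---------------------------------------------------------------------------
using System;

internal static class ShadowStackLayoutSketch
{
    // parameterSizes[i] is the assumed byte size of parameter i (a "this"
    // pointer, if any, would be parameterSizes[0]); localSizes[i] is the
    // assumed byte size of local i; calleeReturnSize is the byte size of a
    // callee's return value.
    public static void Describe(int[] parameterSizes, int[] localSizes, int calleeReturnSize)
    {
        int offset = 0;

        // Parameters occupy the start of the shadow frame.
        for (int i = 0; i < parameterSizes.Length; i++)
        {
            Console.WriteLine($"arg{i}: offset {offset}, size {parameterSizes[i]}");
            offset += parameterSizes[i];
        }

        // Locals start at GetTotalParameterOffset().
        int totalParameterOffset = offset;
        for (int i = 0; i < localSizes.Length; i++)
        {
            Console.WriteLine($"loc{i}: offset {offset}, size {localSizes[i]}");
            offset += localSizes[i];
        }

        int totalLocalOffset = offset - totalParameterOffset;

        // Matches HandleCall: the callee writes its return value right after
        // the caller's parameters and locals, and the callee's own shadow
        // frame (where outgoing arguments are stored) starts after that slot.
        int returnSlotOffset = totalParameterOffset + totalLocalOffset;
        int calleeShadowStackOffset = returnSlotOffset + calleeReturnSize;
        Console.WriteLine($"return slot at offset {returnSlotOffset}, callee shadow frame starts at {calleeShadowStackOffset}");
    }
}

// Example: a static method with two 4-byte parameters, one 8-byte local and a
// 4-byte return value in the callee:
//     ShadowStackLayoutSketch.Describe(new[] { 4, 4 }, new[] { 8 }, 4);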
// // Log.cs // // Author: // Zachary Gramana <[email protected]> // // Copyright (c) 2014 Xamarin Inc // Copyright (c) 2014 .NET Foundation // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // // // Copyright (c) 2014 Couchbase, Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file // except in compliance with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the // License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, // either express or implied. See the License for the specific language governing permissions // and limitations under the License. // using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading; namespace Couchbase.Lite.Util { /// <summary> /// Centralized logging facility. /// </summary> public static class Log { #region Enums /// <summary> /// A level of logging verbosity /// </summary> public enum LogLevel { /// <summary> /// No logs are output /// </summary> None, /// <summary> /// Informational logs are output (the default for most) /// </summary> Base, /// <summary> /// Verbose logs are output /// </summary> Verbose, /// <summary> /// Debugging logs are output (Only applicable in debug builds) /// </summary> Debug } #endregion #region Variables internal static readonly LogTo To = new LogTo(); /// <summary> /// The available logging domains (for use with setting the /// logging level on various domains) /// </summary> public static readonly LogDomains Domains = new LogDomains(To); /// <summary> /// Gets or sets a value indicated if logging is disabled (if so, /// nothing will be logged) /// </summary> public static bool Disabled { get; set; } #endregion #region Properties /// <summary> /// Gets or sets the level at which the logger will redacted sensivity /// information from the logs. 
/// </summary> public static LogScrubSensitivity ScrubSensitivity { get { return _scrubSensitivity; } set { if (value != _scrubSensitivity) { if (value == LogScrubSensitivity.AllOK) { foreach (var logger in Loggers) { logger.I("Log", "SCRUBBING DISABLED, THIS LOG MAY CONTAIN SENSITIVE INFORMATION"); } } _scrubSensitivity = value; } } } private static LogScrubSensitivity _scrubSensitivity; /// <summary> /// Gets or sets the logging level for Log.* calls (domains /// must be set with their respective interfaces /// </summary> public static LogLevel Level { get { return To.NoDomain.Level; } set { To.NoDomain.Level = value; } } private static List<ILogger> _Loggers = new List<ILogger> { LoggerFactory.CreateLogger() }; internal static IEnumerable<ILogger> Loggers { get { return _Loggers; } } #endregion #region Constructors static Log() { Level = LogLevel.Base; #if !__IOS__ && !__ANDROID__ && !NET_3_5 var configSection = System.Configuration.ConfigurationManager.GetSection("couchbaselite") as Couchbase.Lite.Configuration.CouchbaseConfigSection; if(configSection != null && configSection.Logging != null) { Log.Disabled = !configSection.Logging.Enabled; Log.ScrubSensitivity = configSection.Logging.ScrubSensitivity; Log.Level = configSection.Logging.GlobalLevel; foreach(var logSetting in configSection.Logging.VerbositySettings.Values) { var property = Log.Domains.GetType().GetProperty(logSetting.Key); if(property == null) { Log.To.NoDomain.W("Log", "Invalid domain {0} in configuration file", logSetting.Key); continue; } var gotLogger = (IDomainLogging)property.GetValue(Log.Domains); gotLogger.Level = logSetting.Value; } } #endif } #endregion #region Public Methods /// <summary> /// Sets the logger. /// </summary> /// <returns><c>true</c>, if Logger was set, <c>false</c> otherwise.</returns> /// <param name="customLogger">Custom logger.</param> public static bool SetLogger(ILogger customLogger) { var loggers = _Loggers; if (loggers != null) { foreach (var logger in loggers) { var disposable = logger as IDisposable; if (disposable != null) { disposable.Dispose(); } } } _Loggers = customLogger != null ? 
new List<ILogger> { customLogger } : new List<ILogger>(); return true; } /// <summary> /// Add a logger to the list of loggers to write output to /// </summary> /// <param name="logger">The logger to add</param> public static void AddLogger(ILogger logger) { if (logger != null) { _Loggers.Add (logger); } } /// <summary> /// Sets the logger to the library provided logger /// </summary> /// <returns><c>true</c>, if logger was set, <c>false</c> otherwise.</returns> public static bool SetDefaultLogger() { return SetLogger(LoggerFactory.CreateLogger()); } /// <summary> /// Sets up Couchbase Lite to use the default logger (an internal class), /// with the specified logging level /// </summary> /// <returns><c>true</c>, if the logger was changed, <c>false</c> otherwise.</returns> /// <param name="level">The levels to log</param> [Obsolete("Use the SetDefaultLogger() with no arguments to restore the default logger," + " and use the Level property to change the verbosity")] public static bool SetDefaultLoggerWithLevel(SourceLevels level) { if (level.HasFlag(SourceLevels.All)) { Level = LogLevel.Debug; } else { if (level.HasFlag(SourceLevels.Information) || level.HasFlag(SourceLevels.Warning) || level.HasFlag(SourceLevels.Error)) { Level = LogLevel.Base; } if (level.HasFlag(SourceLevels.Verbose)) { Level = LogLevel.Verbose; } if (level.HasFlag(SourceLevels.ActivityTracing)) { Level = LogLevel.Debug; } } return SetLogger(LoggerFactory.CreateLogger()); } /// <summary>Send a VERBOSE message.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="msg">The message you would like logged.</param> public static void V(string tag, string msg) { To.NoDomain.V(tag, msg); } /// <summary>Send a VERBOSE message and log the exception.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="msg">The message you would like logged.</param> /// <param name="tr">An exception to log</param> public static void V(string tag, string msg, Exception tr) { To.NoDomain.V(tag, msg, tr); } /// <summary>Send a VERBOSE message</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="format">The format of the message you would like logged.</param> /// <param name="args">The message format arguments</param> public static void V(string tag, string format, params object[] args) { To.NoDomain.V(tag, format, args); } /// <summary>Send a DEBUG message.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="msg">The message you would like logged.</param> [System.Diagnostics.Conditional("DEBUG")] public static void D(string tag, string msg) { To.NoDomain.D(tag, msg); } /// <summary>Send a DEBUG message and log the exception.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. 
/// </param> /// <param name="msg">The message you would like logged.</param> /// <param name="tr">An exception to log</param> [System.Diagnostics.Conditional("DEBUG")] public static void D(string tag, string msg, Exception tr) { To.NoDomain.D(tag, msg, tr); } /// <summary>Send a DEBUG message</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="format">The format of the message you would like logged.</param> /// <param name="args">The message format arguments</param> [System.Diagnostics.Conditional("DEBUG")] public static void D(string tag, string format, params object[] args) { To.NoDomain.D(tag, format, args); } /// <summary>Send an INFO message.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="msg">The message you would like logged.</param> public static void I(string tag, string msg) { To.NoDomain.I(tag, msg); } /// <summary>Send a INFO message and log the exception.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="msg">The message you would like logged.</param> /// <param name="tr">An exception to log</param> public static void I(string tag, string msg, Exception tr) { To.NoDomain.I(tag, msg, tr); } /// <summary>Send a INFO message</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="format">The format of the message you would like logged.</param> /// <param name="args">The message format arguments</param> public static void I(string tag, string format, params object[] args) { To.NoDomain.I(tag, format, args); } /// <summary>Send a WARN message.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="msg">The message you would like logged.</param> public static void W(string tag, string msg) { To.NoDomain.W(tag, msg); } /// <summary>Send a WARN message.</summary> /// <param name="tag">Tag.</param> /// <param name="tr">Exception</param> [Obsolete("This method signature is not like the others and will be removed")] public static void W(string tag, Exception tr) { To.NoDomain.W(tag, "No message, do not call this method"); } /// <summary>Send a WARN message and log the exception.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. /// </param> /// <param name="msg">The message you would like logged.</param> /// <param name="tr">An exception to log</param> public static void W(string tag, string msg, Exception tr) { To.NoDomain.W(tag, msg, tr); } /// <summary>Send a WARN message and log the exception.</summary> /// <param name="tag"> /// Used to identify the source of a log message. It usually identifies /// the class or activity where the log call occurs. 
/// </param>
        /// <param name="format">The format of the message you would like logged.</param>
        /// <param name="args">The message format arguments</param>
        public static void W(string tag, string format, params object[] args)
        {
            To.NoDomain.W(tag, format, args);
        }

        /// <summary>Send an ERROR message.</summary>
        /// <param name="tag">
        /// Used to identify the source of a log message. It usually identifies
        /// the class or activity where the log call occurs.
        /// </param>
        /// <param name="msg">The message you would like logged.</param>
        public static void E(string tag, string msg)
        {
            To.NoDomain.E(tag, msg);
        }

        /// <summary>Send an ERROR message and log the exception.</summary>
        /// <param name="tag">
        /// Used to identify the source of a log message. It usually identifies
        /// the class or activity where the log call occurs.
        /// </param>
        /// <param name="msg">The message you would like logged.</param>
        /// <param name="tr">An exception to log</param>
        public static void E(string tag, string msg, Exception tr)
        {
            To.NoDomain.E(tag, msg, tr);
        }

        /// <summary>Send an ERROR message</summary>
        /// <param name="tag">
        /// Used to identify the source of a log message. It usually identifies
        /// the class or activity where the log call occurs.
        /// </param>
        /// <param name="format">The format of the message you would like logged.</param>
        /// <param name="args">The message format arguments</param>
        public static void E(string tag, string format, params object[] args)
        {
            To.NoDomain.E(tag, format, args);
        }

        #endregion
    }
}
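// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original sources): the snippet
// below shows how the static Log facade above is typically driven from
// application code. The class name and the "MyApp" tag are hypothetical; only
// members visible above (Level, Disabled, SetDefaultLogger, V/D/I/W/E) are
// exercised.
// ---------------------------------------------------------------------------
using System;
using Couchbase.Lite.Util;

internal static class LogUsageExample
{
    public static void Run()
    {
        // Raise the global verbosity for Log.* calls routed through the
        // domain-less logger.
        Log.Level = Log.LogLevel.Verbose;

        Log.I("MyApp", "Informational message: replication started");
        Log.V("MyApp", "Verbose message with format args: {0} docs pending", 42);
        Log.D("MyApp", "Debug-only message (compiled out of release builds)");

        try
        {
            throw new InvalidOperationException("simulated failure");
        }
        catch (Exception e)
        {
            // The warning and error overloads accept an exception to log
            // alongside the message.
            Log.W("MyApp", "Recoverable problem encountered", e);
            Log.E("MyApp", "Unrecoverable problem: {0}", e.Message);
        }

        // Silence all output (e.g. in tests), then restore the default logger.
        Log.Disabled = true;
        Log.Disabled = false;
        Log.SetDefaultLogger();
    }
}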
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Runtime.CompilerServices; using System.Threading; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CodeGeneration; using Microsoft.CodeAnalysis.Editor; using Microsoft.CodeAnalysis.Editor.Shared.Extensions; using Microsoft.CodeAnalysis.Editor.Shared.Utilities; using Microsoft.CodeAnalysis.Editing; using Microsoft.CodeAnalysis.Formatting; using Microsoft.CodeAnalysis.Formatting.Rules; using Microsoft.CodeAnalysis.Host; using Microsoft.CodeAnalysis.LanguageServices; using Microsoft.CodeAnalysis.Options; using Microsoft.CodeAnalysis.Rename; using Microsoft.CodeAnalysis.Shared.Extensions; using Microsoft.CodeAnalysis.Simplification; using Microsoft.CodeAnalysis.Text; using Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel.ExternalElements; using Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel.InternalElements; using Microsoft.VisualStudio.LanguageServices.Implementation.Interop; using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem; using Microsoft.VisualStudio.LanguageServices.Implementation.Utilities; using Microsoft.VisualStudio.Text; using Microsoft.VisualStudio.Text.Editor; using Microsoft.VisualStudio.Text.Editor.OptionsExtensionMethods; using Roslyn.Utilities; using Microsoft.CodeAnalysis.GeneratedCodeRecognition; namespace Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel { internal abstract partial class AbstractCodeModelService : ICodeModelService { private readonly ConditionalWeakTable<SyntaxTree, IBidirectionalMap<SyntaxNodeKey, SyntaxNode>> _treeToNodeKeyMaps = new ConditionalWeakTable<SyntaxTree, IBidirectionalMap<SyntaxNodeKey, SyntaxNode>>(); protected readonly ISyntaxFactsService SyntaxFactsService; private readonly IEditorOptionsFactoryService _editorOptionsFactoryService; private readonly AbstractNodeNameGenerator _nodeNameGenerator; private readonly AbstractNodeLocator _nodeLocator; private readonly AbstractCodeModelEventCollector _eventCollector; private readonly IEnumerable<IRefactorNotifyService> _refactorNotifyServices; private readonly IFormattingRule _lineAdjustmentFormattingRule; private readonly IFormattingRule _endRegionFormattingRule; protected AbstractCodeModelService( HostLanguageServices languageServiceProvider, IEditorOptionsFactoryService editorOptionsFactoryService, IEnumerable<IRefactorNotifyService> refactorNotifyServices, IFormattingRule lineAdjustmentFormattingRule, IFormattingRule endRegionFormattingRule) { Debug.Assert(languageServiceProvider != null); Debug.Assert(editorOptionsFactoryService != null); this.SyntaxFactsService = languageServiceProvider.GetService<ISyntaxFactsService>(); _editorOptionsFactoryService = editorOptionsFactoryService; _lineAdjustmentFormattingRule = lineAdjustmentFormattingRule; _endRegionFormattingRule = endRegionFormattingRule; _refactorNotifyServices = refactorNotifyServices; _nodeNameGenerator = CreateNodeNameGenerator(); _nodeLocator = CreateNodeLocator(); _eventCollector = CreateCodeModelEventCollector(); } protected string GetNewLineCharacter(SourceText text) { return _editorOptionsFactoryService.GetEditorOptions(text).GetNewLineCharacter(); } protected int GetTabSize(SourceText text) { var snapshot = text.FindCorrespondingEditorTextSnapshot(); return GetTabSize(snapshot); } 
protected int GetTabSize(ITextSnapshot snapshot) { if (snapshot == null) { throw new ArgumentNullException(nameof(snapshot)); } var textBuffer = snapshot.TextBuffer; return _editorOptionsFactoryService.GetOptions(textBuffer).GetTabSize(); } protected SyntaxToken GetTokenWithoutAnnotation(SyntaxToken current, Func<SyntaxToken, SyntaxToken> nextTokenGetter) { while (current.ContainsAnnotations) { current = nextTokenGetter(current); } return current; } protected TextSpan GetEncompassingSpan(SyntaxNode root, SyntaxToken startToken, SyntaxToken endToken) { var startPosition = startToken.SpanStart; var endPosition = endToken.RawKind == 0 ? root.Span.End : endToken.Span.End; return TextSpan.FromBounds(startPosition, endPosition); } private IBidirectionalMap<SyntaxNodeKey, SyntaxNode> BuildNodeKeyMap(SyntaxTree syntaxTree) { var nameOrdinalMap = new Dictionary<string, int>(); var nodeKeyMap = BidirectionalMap<SyntaxNodeKey, SyntaxNode>.Empty; foreach (var node in GetFlattenedMemberNodes(syntaxTree)) { var name = _nodeNameGenerator.GenerateName(node); int ordinal; if (!nameOrdinalMap.TryGetValue(name, out ordinal)) { ordinal = 0; } nameOrdinalMap[name] = ++ordinal; var key = new SyntaxNodeKey(name, ordinal); nodeKeyMap = nodeKeyMap.Add(key, node); } return nodeKeyMap; } private IBidirectionalMap<SyntaxNodeKey, SyntaxNode> GetNodeKeyMap(SyntaxTree syntaxTree) { return _treeToNodeKeyMaps.GetValue(syntaxTree, BuildNodeKeyMap); } public SyntaxNodeKey GetNodeKey(SyntaxNode node) { var nodeKey = TryGetNodeKey(node); if (nodeKey.IsEmpty) { throw new ArgumentException(); } return nodeKey; } public SyntaxNodeKey TryGetNodeKey(SyntaxNode node) { var nodeKeyMap = GetNodeKeyMap(node.SyntaxTree); SyntaxNodeKey nodeKey; if (!nodeKeyMap.TryGetKey(node, out nodeKey)) { return SyntaxNodeKey.Empty; } return nodeKey; } public SyntaxNode LookupNode(SyntaxNodeKey nodeKey, SyntaxTree syntaxTree) { var nodeKeyMap = GetNodeKeyMap(syntaxTree); SyntaxNode node; if (!nodeKeyMap.TryGetValue(nodeKey, out node)) { throw new ArgumentException(); } return node; } public bool TryLookupNode(SyntaxNodeKey nodeKey, SyntaxTree syntaxTree, out SyntaxNode node) { var nodeKeyMap = GetNodeKeyMap(syntaxTree); return nodeKeyMap.TryGetValue(nodeKey, out node); } public abstract bool MatchesScope(SyntaxNode node, EnvDTE.vsCMElement scope); public abstract IEnumerable<SyntaxNode> GetOptionNodes(SyntaxNode parent); public abstract IEnumerable<SyntaxNode> GetImportNodes(SyntaxNode parent); public abstract IEnumerable<SyntaxNode> GetAttributeNodes(SyntaxNode parent); public abstract IEnumerable<SyntaxNode> GetAttributeArgumentNodes(SyntaxNode parent); public abstract IEnumerable<SyntaxNode> GetInheritsNodes(SyntaxNode parent); public abstract IEnumerable<SyntaxNode> GetImplementsNodes(SyntaxNode parent); public abstract IEnumerable<SyntaxNode> GetParameterNodes(SyntaxNode parent); protected IEnumerable<SyntaxNode> GetFlattenedMemberNodes(SyntaxTree syntaxTree) { return GetMemberNodes(syntaxTree.GetRoot(), includeSelf: true, recursive: true, logicalFields: true, onlySupportedNodes: true); } protected IEnumerable<SyntaxNode> GetLogicalMemberNodes(SyntaxNode container) { return GetMemberNodes(container, includeSelf: false, recursive: false, logicalFields: true, onlySupportedNodes: false); } public IEnumerable<SyntaxNode> GetLogicalSupportedMemberNodes(SyntaxNode container) { return GetMemberNodes(container, includeSelf: false, recursive: false, logicalFields: true, onlySupportedNodes: true); } /// <summary> /// Retrieves the members of a specified 
<paramref name="container"/> node. The members that are /// returned can be controlled by passing various parameters. /// </summary> /// <param name="container">The <see cref="SyntaxNode"/> from which to retrieve members.</param> /// <param name="includeSelf">If true, the container is returned as well.</param> /// <param name="recursive">If true, members are recursed to return descendant members as well /// as immediate children. For example, a namespace would return the namespaces and types within. /// However, if <paramref name="recursive"/> is true, members with the namespaces and types would /// also be returned.</param> /// <param name="logicalFields">If true, field declarations are broken into their respective declarators. /// For example, the field "int x, y" would return two declarators, one for x and one for y in place /// of the field.</param> /// <param name="onlySupportedNodes">If true, only members supported by Code Model are returned.</param> public abstract IEnumerable<SyntaxNode> GetMemberNodes(SyntaxNode container, bool includeSelf, bool recursive, bool logicalFields, bool onlySupportedNodes); public abstract string Language { get; } public abstract string AssemblyAttributeString { get; } public EnvDTE.CodeElement CreateExternalCodeElement(CodeModelState state, ProjectId projectId, ISymbol symbol) { switch (symbol.Kind) { case SymbolKind.Event: return (EnvDTE.CodeElement)ExternalCodeEvent.Create(state, projectId, (IEventSymbol)symbol); case SymbolKind.Field: return (EnvDTE.CodeElement)ExternalCodeVariable.Create(state, projectId, (IFieldSymbol)symbol); case SymbolKind.Method: return (EnvDTE.CodeElement)ExternalCodeFunction.Create(state, projectId, (IMethodSymbol)symbol); case SymbolKind.Namespace: return (EnvDTE.CodeElement)ExternalCodeNamespace.Create(state, projectId, (INamespaceSymbol)symbol); case SymbolKind.NamedType: var namedType = (INamedTypeSymbol)symbol; switch (namedType.TypeKind) { case TypeKind.Class: case TypeKind.Module: return (EnvDTE.CodeElement)ExternalCodeClass.Create(state, projectId, namedType); case TypeKind.Delegate: return (EnvDTE.CodeElement)ExternalCodeDelegate.Create(state, projectId, namedType); case TypeKind.Enum: return (EnvDTE.CodeElement)ExternalCodeEnum.Create(state, projectId, namedType); case TypeKind.Interface: return (EnvDTE.CodeElement)ExternalCodeInterface.Create(state, projectId, namedType); case TypeKind.Struct: return (EnvDTE.CodeElement)ExternalCodeStruct.Create(state, projectId, namedType); default: throw Exceptions.ThrowEFail(); } case SymbolKind.Property: var propertySymbol = (IPropertySymbol)symbol; return propertySymbol.IsWithEvents ? (EnvDTE.CodeElement)ExternalCodeVariable.Create(state, projectId, propertySymbol) : (EnvDTE.CodeElement)ExternalCodeProperty.Create(state, projectId, (IPropertySymbol)symbol); default: throw Exceptions.ThrowEFail(); } } /// <summary> /// Do not use this method directly! 
Instead, go through <see cref="FileCodeModel.GetOrCreateCodeElement{T}(SyntaxNode)"/> /// </summary> public abstract EnvDTE.CodeElement CreateInternalCodeElement( CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node); public EnvDTE.CodeElement CreateCodeType(CodeModelState state, ProjectId projectId, ITypeSymbol typeSymbol) { if (typeSymbol.TypeKind == TypeKind.Pointer || typeSymbol.TypeKind == TypeKind.TypeParameter || typeSymbol.TypeKind == TypeKind.Submission) { throw Exceptions.ThrowEFail(); } if (typeSymbol.TypeKind == TypeKind.Error || typeSymbol.TypeKind == TypeKind.Unknown) { return ExternalCodeUnknown.Create(state, projectId, typeSymbol); } var project = state.Workspace.CurrentSolution.GetProject(projectId); if (project == null) { throw Exceptions.ThrowEFail(); } if (typeSymbol.TypeKind == TypeKind.Dynamic) { var obj = project.GetCompilationAsync().Result.GetSpecialType(SpecialType.System_Object); return (EnvDTE.CodeElement)ExternalCodeClass.Create(state, projectId, obj); } EnvDTE.CodeElement element; if (TryGetElementFromSource(state, project, typeSymbol, out element)) { return element; } EnvDTE.vsCMElement elementKind = GetElementKind(typeSymbol); switch (elementKind) { case EnvDTE.vsCMElement.vsCMElementClass: case EnvDTE.vsCMElement.vsCMElementModule: return (EnvDTE.CodeElement)ExternalCodeClass.Create(state, projectId, typeSymbol); case EnvDTE.vsCMElement.vsCMElementInterface: return (EnvDTE.CodeElement)ExternalCodeInterface.Create(state, projectId, typeSymbol); case EnvDTE.vsCMElement.vsCMElementDelegate: return (EnvDTE.CodeElement)ExternalCodeDelegate.Create(state, projectId, typeSymbol); case EnvDTE.vsCMElement.vsCMElementEnum: return (EnvDTE.CodeElement)ExternalCodeEnum.Create(state, projectId, typeSymbol); case EnvDTE.vsCMElement.vsCMElementStruct: return (EnvDTE.CodeElement)ExternalCodeStruct.Create(state, projectId, typeSymbol); default: Debug.Fail("Unsupported element kind: " + elementKind); throw Exceptions.ThrowEInvalidArg(); } } public abstract EnvDTE.CodeTypeRef CreateCodeTypeRef(CodeModelState state, ProjectId projectId, object type); public abstract EnvDTE.vsCMTypeRef GetTypeKindForCodeTypeRef(ITypeSymbol typeSymbol); public abstract string GetAsFullNameForCodeTypeRef(ITypeSymbol typeSymbol); public abstract string GetAsStringForCodeTypeRef(ITypeSymbol typeSymbol); public abstract bool IsParameterNode(SyntaxNode node); public abstract bool IsAttributeNode(SyntaxNode node); public abstract bool IsAttributeArgumentNode(SyntaxNode node); public abstract bool IsOptionNode(SyntaxNode node); public abstract bool IsImportNode(SyntaxNode node); public ISymbol ResolveSymbol(Workspace workspace, ProjectId projectId, SymbolKey symbolId) { var project = workspace.CurrentSolution.GetProject(projectId); if (project == null) { throw Exceptions.ThrowEFail(); } return symbolId.Resolve(project.GetCompilationAsync().Result).Symbol; } protected EnvDTE.CodeFunction CreateInternalCodeAccessorFunction(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node) { SyntaxNode parentNode = node .Ancestors() .FirstOrDefault(n => TryGetNodeKey(n) != SyntaxNodeKey.Empty); if (parentNode == null) { throw new InvalidOperationException(); } var parent = fileCodeModel.GetOrCreateCodeElement<EnvDTE.CodeElement>(parentNode); var parentObj = ComAggregate.GetManagedObject<AbstractCodeMember>(parent); var accessorKind = GetAccessorKind(node); return CodeAccessorFunction.Create(state, parentObj, accessorKind); } protected EnvDTE.CodeAttribute 
CreateInternalCodeAttribute(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node) { var parentNode = GetEffectiveParentForAttribute(node); AbstractCodeElement parentObject; if (IsParameterNode(parentNode)) { var parentElement = fileCodeModel.GetOrCreateCodeElement<EnvDTE.CodeElement>(parentNode); parentObject = ComAggregate.GetManagedObject<AbstractCodeElement>(parentElement); } else { var nodeKey = parentNode.AncestorsAndSelf() .Select(n => TryGetNodeKey(n)) .FirstOrDefault(nk => nk != SyntaxNodeKey.Empty); if (nodeKey == SyntaxNodeKey.Empty) { // This is an assembly-level attribute. parentNode = fileCodeModel.GetSyntaxRoot(); parentObject = null; } else { parentNode = fileCodeModel.LookupNode(nodeKey); var parentElement = fileCodeModel.GetOrCreateCodeElement<EnvDTE.CodeElement>(parentNode); parentObject = ComAggregate.GetManagedObject<AbstractCodeElement>(parentElement); } } string name; int ordinal; GetAttributeNameAndOrdinal(parentNode, node, out name, out ordinal); return CodeAttribute.Create(state, fileCodeModel, parentObject, name, ordinal); } protected EnvDTE80.CodeImport CreateInternalCodeImport(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node) { SyntaxNode parentNode; string name; GetImportParentAndName(node, out parentNode, out name); AbstractCodeElement parentObj = null; if (parentNode != null) { var parent = fileCodeModel.GetOrCreateCodeElement<EnvDTE.CodeElement>(parentNode); parentObj = ComAggregate.GetManagedObject<AbstractCodeElement>(parent); } return CodeImport.Create(state, fileCodeModel, parentObj, name); } protected EnvDTE.CodeParameter CreateInternalCodeParameter(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node) { SyntaxNode parentNode = node .Ancestors() .FirstOrDefault(n => TryGetNodeKey(n) != SyntaxNodeKey.Empty); if (parentNode == null) { throw new InvalidOperationException(); } string name = GetParameterName(node); var parent = fileCodeModel.GetOrCreateCodeElement<EnvDTE.CodeElement>(parentNode); var parentObj = ComAggregate.GetManagedObject<AbstractCodeMember>(parent); return CodeParameter.Create(state, parentObj, name); } protected EnvDTE80.CodeElement2 CreateInternalCodeOptionStatement(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node) { string name; int ordinal; GetOptionNameAndOrdinal(node.Parent, node, out name, out ordinal); return CodeOptionsStatement.Create(state, fileCodeModel, name, ordinal); } protected EnvDTE80.CodeElement2 CreateInternalCodeInheritsStatement(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node) { SyntaxNode parentNode = node .Ancestors() .FirstOrDefault(n => TryGetNodeKey(n) != SyntaxNodeKey.Empty); if (parentNode == null) { throw new InvalidOperationException(); } string namespaceName; int ordinal; GetInheritsNamespaceAndOrdinal(parentNode, node, out namespaceName, out ordinal); var parent = fileCodeModel.GetOrCreateCodeElement<EnvDTE.CodeElement>(parentNode); var parentObj = ComAggregate.GetManagedObject<AbstractCodeMember>(parent); return CodeInheritsStatement.Create(state, parentObj, namespaceName, ordinal); } protected EnvDTE80.CodeElement2 CreateInternalCodeImplementsStatement(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node) { SyntaxNode parentNode = node .Ancestors() .FirstOrDefault(n => TryGetNodeKey(n) != SyntaxNodeKey.Empty); if (parentNode == null) { throw new InvalidOperationException(); } string namespaceName; int ordinal; GetImplementsNamespaceAndOrdinal(parentNode, node, out namespaceName, out ordinal); var parent = 
fileCodeModel.GetOrCreateCodeElement<EnvDTE.CodeElement>(parentNode); var parentObj = ComAggregate.GetManagedObject<AbstractCodeMember>(parent); return CodeImplementsStatement.Create(state, parentObj, namespaceName, ordinal); } protected EnvDTE80.CodeAttributeArgument CreateInternalCodeAttributeArgument(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node) { SyntaxNode attributeNode; int index; GetAttributeArgumentParentAndIndex(node, out attributeNode, out index); var codeAttribute = CreateInternalCodeAttribute(state, fileCodeModel, attributeNode); var codeAttributeObj = ComAggregate.GetManagedObject<CodeAttribute>(codeAttribute); return CodeAttributeArgument.Create(state, codeAttributeObj, index); } public abstract EnvDTE.CodeElement CreateUnknownCodeElement(CodeModelState state, FileCodeModel fileCodeModel, SyntaxNode node); public abstract EnvDTE.CodeElement CreateUnknownRootNamespaceCodeElement(CodeModelState state, FileCodeModel fileCodeModel); public abstract string GetUnescapedName(string name); public abstract string GetName(SyntaxNode node); public abstract SyntaxNode GetNodeWithName(SyntaxNode node); public abstract SyntaxNode SetName(SyntaxNode node, string name); public abstract string GetFullName(SyntaxNode node, SemanticModel semanticModel); public abstract string GetFullyQualifiedName(string name, int position, SemanticModel semanticModel); public void Rename(ISymbol symbol, string newName, Solution solution) { // TODO: (tomescht) make this testable through unit tests. // Right now we have to go through the project system to find all // the outstanding CodeElements which might be affected by the // rename. This is silly. This functionality should be moved down // into the service layer. var workspace = solution.Workspace as VisualStudioWorkspaceImpl; if (workspace == null) { throw Exceptions.ThrowEFail(); } // Save the node keys. var nodeKeyValidation = new NodeKeyValidation(); foreach (var project in workspace.ProjectTracker.Projects) { nodeKeyValidation.AddProject(project); } var optionSet = workspace.Services.GetService<IOptionService>().GetOptions(); // Rename symbol. var newSolution = Renamer.RenameSymbolAsync(solution, symbol, newName, optionSet).WaitAndGetResult_CodeModel(CancellationToken.None); var changedDocuments = newSolution.GetChangedDocuments(solution); // Notify third parties of the coming rename operation and let exceptions propagate out _refactorNotifyServices.TryOnBeforeGlobalSymbolRenamed(workspace, changedDocuments, symbol, newName, throwOnFailure: true); // Update the workspace. if (!workspace.TryApplyChanges(newSolution)) { throw Exceptions.ThrowEFail(); } // Notify third parties of the completed rename operation and let exceptions propagate out _refactorNotifyServices.TryOnAfterGlobalSymbolRenamed(workspace, changedDocuments, symbol, newName, throwOnFailure: true); RenameTrackingDismisser.DismissRenameTracking(workspace, changedDocuments); // Update the node keys. nodeKeyValidation.RestoreKeys(); } public abstract bool IsValidExternalSymbol(ISymbol symbol); public abstract string GetExternalSymbolName(ISymbol symbol); public abstract string GetExternalSymbolFullName(ISymbol symbol); public VirtualTreePoint? GetStartPoint(SyntaxNode node, EnvDTE.vsCMPart? part) { return _nodeLocator.GetStartPoint(node, part); } public VirtualTreePoint? GetEndPoint(SyntaxNode node, EnvDTE.vsCMPart? 
part) { return _nodeLocator.GetEndPoint(node, part); } public abstract EnvDTE.vsCMAccess GetAccess(ISymbol symbol); public abstract EnvDTE.vsCMAccess GetAccess(SyntaxNode node); public abstract SyntaxNode GetNodeWithModifiers(SyntaxNode node); public abstract SyntaxNode GetNodeWithType(SyntaxNode node); public abstract SyntaxNode GetNodeWithInitializer(SyntaxNode node); public abstract SyntaxNode SetAccess(SyntaxNode node, EnvDTE.vsCMAccess access); public abstract EnvDTE.vsCMElement GetElementKind(SyntaxNode node); protected EnvDTE.vsCMElement GetElementKind(ITypeSymbol typeSymbol) { switch (typeSymbol.TypeKind) { case TypeKind.Array: case TypeKind.Class: return EnvDTE.vsCMElement.vsCMElementClass; case TypeKind.Interface: return EnvDTE.vsCMElement.vsCMElementInterface; case TypeKind.Struct: return EnvDTE.vsCMElement.vsCMElementStruct; case TypeKind.Enum: return EnvDTE.vsCMElement.vsCMElementEnum; case TypeKind.Delegate: return EnvDTE.vsCMElement.vsCMElementDelegate; case TypeKind.Module: return EnvDTE.vsCMElement.vsCMElementModule; default: Debug.Fail("Unexpected TypeKind: " + typeSymbol.TypeKind); throw Exceptions.ThrowEInvalidArg(); } } protected bool TryGetElementFromSource(CodeModelState state, Project project, ITypeSymbol typeSymbol, out EnvDTE.CodeElement element) { element = null; if (!typeSymbol.IsDefinition) { return false; } // Here's the strategy for determine what source file we'd try to return an element from. // 1. Prefer source files that we don't heuristically flag as generated code. // 2. If all of the source files are generated code, pick the first one. var generatedCodeRecognitionService = project.Solution.Workspace.Services.GetService<IGeneratedCodeRecognitionService>(); Compilation compilation = null; Tuple<DocumentId, Location> generatedCode = null; DocumentId chosenDocumentId = null; Location chosenLocation = null; foreach (var location in typeSymbol.Locations) { if (location.IsInSource) { compilation = compilation ?? project.GetCompilationAsync(CancellationToken.None).WaitAndGetResult_CodeModel(CancellationToken.None); if (compilation.ContainsSyntaxTree(location.SourceTree)) { var document = project.GetDocument(location.SourceTree); if (generatedCodeRecognitionService?.IsGeneratedCode(document) == false) { chosenLocation = location; chosenDocumentId = document.Id; break; } else { generatedCode = generatedCode ?? 
Tuple.Create(document.Id, location); } } } } if (chosenDocumentId == null && generatedCode != null) { chosenDocumentId = generatedCode.Item1; chosenLocation = generatedCode.Item2; } if (chosenDocumentId != null) { var fileCodeModel = state.Workspace.GetFileCodeModel(chosenDocumentId); if (fileCodeModel != null) { var underlyingFileCodeModel = ComAggregate.GetManagedObject<FileCodeModel>(fileCodeModel); element = underlyingFileCodeModel.CodeElementFromPosition(chosenLocation.SourceSpan.Start, GetElementKind(typeSymbol)); return element != null; } } return false; } public abstract bool IsAccessorNode(SyntaxNode node); public abstract MethodKind GetAccessorKind(SyntaxNode node); public abstract bool TryGetAccessorNode(SyntaxNode parentNode, MethodKind kind, out SyntaxNode accessorNode); public abstract bool TryGetParameterNode(SyntaxNode parentNode, string name, out SyntaxNode parameterNode); public abstract bool TryGetImportNode(SyntaxNode parentNode, string dottedName, out SyntaxNode importNode); public abstract bool TryGetOptionNode(SyntaxNode parentNode, string name, int ordinal, out SyntaxNode optionNode); public abstract bool TryGetInheritsNode(SyntaxNode parentNode, string name, int ordinal, out SyntaxNode inheritsNode); public abstract bool TryGetImplementsNode(SyntaxNode parentNode, string name, int ordinal, out SyntaxNode implementsNode); public abstract bool TryGetAttributeNode(SyntaxNode parentNode, string name, int ordinal, out SyntaxNode attributeNode); public abstract bool TryGetAttributeArgumentNode(SyntaxNode attributeNode, int index, out SyntaxNode attributeArgumentNode); public abstract void GetOptionNameAndOrdinal(SyntaxNode parentNode, SyntaxNode optionNode, out string name, out int ordinal); public abstract void GetInheritsNamespaceAndOrdinal(SyntaxNode inheritsNode, SyntaxNode optionNode, out string namespaceName, out int ordinal); public abstract void GetImplementsNamespaceAndOrdinal(SyntaxNode implementsNode, SyntaxNode optionNode, out string namespaceName, out int ordinal); public abstract void GetAttributeNameAndOrdinal(SyntaxNode parentNode, SyntaxNode attributeNode, out string name, out int ordinal); public abstract SyntaxNode GetAttributeTargetNode(SyntaxNode attributeNode); public abstract string GetAttributeTarget(SyntaxNode attributeNode); public abstract string GetAttributeValue(SyntaxNode attributeNode); public abstract SyntaxNode SetAttributeTarget(SyntaxNode attributeNode, string value); public abstract SyntaxNode SetAttributeValue(SyntaxNode attributeNode, string value); public abstract SyntaxNode GetNodeWithAttributes(SyntaxNode node); public abstract SyntaxNode GetEffectiveParentForAttribute(SyntaxNode node); public abstract SyntaxNode CreateAttributeNode(string name, string value, string target = null); public abstract void GetAttributeArgumentParentAndIndex(SyntaxNode attributeArgumentNode, out SyntaxNode attributeNode, out int index); public abstract SyntaxNode CreateAttributeArgumentNode(string name, string value); public abstract string GetAttributeArgumentValue(SyntaxNode attributeArgumentNode); public abstract string GetImportAlias(SyntaxNode node); public abstract string GetImportNamespaceOrType(SyntaxNode node); public abstract void GetImportParentAndName(SyntaxNode importNode, out SyntaxNode namespaceNode, out string name); public abstract SyntaxNode CreateImportNode(string name, string alias = null); public abstract string GetParameterName(SyntaxNode node); public virtual string GetParameterFullName(SyntaxNode node) { return 
GetParameterName(node); } public abstract EnvDTE80.vsCMParameterKind GetParameterKind(SyntaxNode node); public abstract SyntaxNode SetParameterKind(SyntaxNode node, EnvDTE80.vsCMParameterKind kind); public abstract SyntaxNode CreateParameterNode(string name, string type); public abstract EnvDTE.vsCMFunction ValidateFunctionKind(SyntaxNode containerNode, EnvDTE.vsCMFunction kind, string name); public abstract bool SupportsEventThrower { get; } public abstract bool GetCanOverride(SyntaxNode memberNode); public abstract SyntaxNode SetCanOverride(SyntaxNode memberNode, bool value); public abstract EnvDTE80.vsCMClassKind GetClassKind(SyntaxNode typeNode, INamedTypeSymbol typeSymbol); public abstract SyntaxNode SetClassKind(SyntaxNode typeNode, EnvDTE80.vsCMClassKind kind); public abstract string GetComment(SyntaxNode node); public abstract SyntaxNode SetComment(SyntaxNode node, string value); public abstract EnvDTE80.vsCMConstKind GetConstKind(SyntaxNode variableNode); public abstract SyntaxNode SetConstKind(SyntaxNode variableNode, EnvDTE80.vsCMConstKind kind); public abstract EnvDTE80.vsCMDataTypeKind GetDataTypeKind(SyntaxNode typeNode, INamedTypeSymbol symbol); public abstract SyntaxNode SetDataTypeKind(SyntaxNode typeNode, EnvDTE80.vsCMDataTypeKind kind); public abstract string GetDocComment(SyntaxNode node); public abstract SyntaxNode SetDocComment(SyntaxNode node, string value); public abstract EnvDTE.vsCMFunction GetFunctionKind(IMethodSymbol symbol); public abstract EnvDTE80.vsCMInheritanceKind GetInheritanceKind(SyntaxNode typeNode, INamedTypeSymbol typeSymbol); public abstract SyntaxNode SetInheritanceKind(SyntaxNode typeNode, EnvDTE80.vsCMInheritanceKind kind); public abstract bool GetIsAbstract(SyntaxNode memberNode, ISymbol symbol); public abstract SyntaxNode SetIsAbstract(SyntaxNode memberNode, bool value); public abstract bool GetIsConstant(SyntaxNode variableNode); public abstract SyntaxNode SetIsConstant(SyntaxNode variableNode, bool value); public abstract bool GetIsDefault(SyntaxNode propertyNode); public abstract SyntaxNode SetIsDefault(SyntaxNode propertyNode, bool value); public abstract bool GetIsGeneric(SyntaxNode memberNode); public abstract bool GetIsPropertyStyleEvent(SyntaxNode eventNode); public abstract bool GetIsShared(SyntaxNode memberNode, ISymbol symbol); public abstract SyntaxNode SetIsShared(SyntaxNode memberNode, bool value); public abstract bool GetMustImplement(SyntaxNode memberNode); public abstract SyntaxNode SetMustImplement(SyntaxNode memberNode, bool value); public abstract EnvDTE80.vsCMOverrideKind GetOverrideKind(SyntaxNode memberNode); public abstract SyntaxNode SetOverrideKind(SyntaxNode memberNode, EnvDTE80.vsCMOverrideKind kind); public abstract EnvDTE80.vsCMPropertyKind GetReadWrite(SyntaxNode memberNode); public abstract SyntaxNode SetType(SyntaxNode node, ITypeSymbol typeSymbol); public abstract Document Delete(Document document, SyntaxNode node); public abstract string GetMethodXml(SyntaxNode node, SemanticModel semanticModel); public abstract string GetInitExpression(SyntaxNode node); public abstract SyntaxNode AddInitExpression(SyntaxNode node, string value); public abstract CodeGenerationDestination GetDestination(SyntaxNode containerNode); protected abstract Accessibility GetDefaultAccessibility(SymbolKind targetSymbolKind, CodeGenerationDestination destination); public Accessibility GetAccessibility(EnvDTE.vsCMAccess access, SymbolKind targetSymbolKind, CodeGenerationDestination destination = CodeGenerationDestination.Unspecified) { // 
Note: Some EnvDTE.vsCMAccess members aren't "bitwise-mutually-exclusive" // Specifically, vsCMAccessProjectOrProtected (12) is a combination of vsCMAccessProject (4) and vsCMAccessProtected (8) // We therefore check for this first. if ((access & EnvDTE.vsCMAccess.vsCMAccessProjectOrProtected) == EnvDTE.vsCMAccess.vsCMAccessProjectOrProtected) { return Accessibility.ProtectedOrInternal; } else if ((access & EnvDTE.vsCMAccess.vsCMAccessPrivate) != 0) { return Accessibility.Private; } else if ((access & EnvDTE.vsCMAccess.vsCMAccessProject) != 0) { return Accessibility.Internal; } else if ((access & EnvDTE.vsCMAccess.vsCMAccessProtected) != 0) { return Accessibility.Protected; } else if ((access & EnvDTE.vsCMAccess.vsCMAccessPublic) != 0) { return Accessibility.Public; } else if ((access & EnvDTE.vsCMAccess.vsCMAccessDefault) != 0) { return GetDefaultAccessibility(targetSymbolKind, destination); } else { throw new ArgumentException(ServicesVSResources.InvalidAccess, "access"); } } public bool GetWithEvents(EnvDTE.vsCMAccess access) { return (access & EnvDTE.vsCMAccess.vsCMAccessWithEvents) != 0; } protected SpecialType GetSpecialType(EnvDTE.vsCMTypeRef type) { // TODO(DustinCa): Verify this list against VB switch (type) { case EnvDTE.vsCMTypeRef.vsCMTypeRefBool: return SpecialType.System_Boolean; case EnvDTE.vsCMTypeRef.vsCMTypeRefByte: return SpecialType.System_Byte; case EnvDTE.vsCMTypeRef.vsCMTypeRefChar: return SpecialType.System_Char; case EnvDTE.vsCMTypeRef.vsCMTypeRefDecimal: return SpecialType.System_Decimal; case EnvDTE.vsCMTypeRef.vsCMTypeRefDouble: return SpecialType.System_Double; case EnvDTE.vsCMTypeRef.vsCMTypeRefFloat: return SpecialType.System_Single; case EnvDTE.vsCMTypeRef.vsCMTypeRefInt: return SpecialType.System_Int32; case EnvDTE.vsCMTypeRef.vsCMTypeRefLong: return SpecialType.System_Int64; case EnvDTE.vsCMTypeRef.vsCMTypeRefObject: return SpecialType.System_Object; case EnvDTE.vsCMTypeRef.vsCMTypeRefShort: return SpecialType.System_Int16; case EnvDTE.vsCMTypeRef.vsCMTypeRefString: return SpecialType.System_String; case EnvDTE.vsCMTypeRef.vsCMTypeRefVoid: return SpecialType.System_Void; default: // TODO: Support vsCMTypeRef2? It doesn't appear that Dev10 C# does... 
throw new ArgumentException(); } } private ITypeSymbol GetSpecialType(EnvDTE.vsCMTypeRef type, Compilation compilation) { return compilation.GetSpecialType(GetSpecialType(type)); } protected abstract ITypeSymbol GetTypeSymbolFromPartialName(string partialName, SemanticModel semanticModel, int position); public abstract ITypeSymbol GetTypeSymbolFromFullName(string fullName, Compilation compilation); public ITypeSymbol GetTypeSymbol(object type, SemanticModel semanticModel, int position) { if (type is EnvDTE.CodeTypeRef) { return GetTypeSymbolFromPartialName(((EnvDTE.CodeTypeRef)type).AsString, semanticModel, position); } else if (type is EnvDTE.CodeType) { return GetTypeSymbolFromFullName(((EnvDTE.CodeType)type).FullName, semanticModel.Compilation); } ITypeSymbol typeSymbol; if (type is EnvDTE.vsCMTypeRef || type is int) { typeSymbol = GetSpecialType((EnvDTE.vsCMTypeRef)type, semanticModel.Compilation); } else if (type is string) { typeSymbol = GetTypeSymbolFromPartialName((string)type, semanticModel, position); } else { throw new InvalidOperationException(); } if (typeSymbol == null) { throw new ArgumentException(); } return typeSymbol; } public abstract SyntaxNode CreateReturnDefaultValueStatement(ITypeSymbol type); protected abstract int GetAttributeIndexInContainer( SyntaxNode containerNode, Func<SyntaxNode, bool> predicate); /// <summary> /// The position argument is a VARIANT which may be an EnvDTE.CodeElement, an int or a string /// representing the name of a member. This function translates the argument and returns the /// 1-based position of the specified attribute. /// </summary> public int PositionVariantToAttributeInsertionIndex(object position, SyntaxNode containerNode, FileCodeModel fileCodeModel) { return PositionVariantToInsertionIndex( position, containerNode, fileCodeModel, GetAttributeIndexInContainer, GetAttributeNodes); } protected abstract int GetAttributeArgumentIndexInContainer( SyntaxNode containerNode, Func<SyntaxNode, bool> predicate); public int PositionVariantToAttributeArgumentInsertionIndex(object position, SyntaxNode containerNode, FileCodeModel fileCodeModel) { return PositionVariantToInsertionIndex( position, containerNode, fileCodeModel, GetAttributeArgumentIndexInContainer, GetAttributeArgumentNodes); } protected abstract int GetImportIndexInContainer( SyntaxNode containerNode, Func<SyntaxNode, bool> predicate); public int PositionVariantToImportInsertionIndex(object position, SyntaxNode containerNode, FileCodeModel fileCodeModel) { return PositionVariantToInsertionIndex( position, containerNode, fileCodeModel, GetImportIndexInContainer, GetImportNodes); } protected abstract int GetParameterIndexInContainer( SyntaxNode containerNode, Func<SyntaxNode, bool> predicate); public int PositionVariantToParameterInsertionIndex(object position, SyntaxNode containerNode, FileCodeModel fileCodeModel) { return PositionVariantToInsertionIndex( position, containerNode, fileCodeModel, GetParameterIndexInContainer, GetParameterNodes); } /// <summary> /// Finds the index of the first child within the container for which <paramref name="predicate"/> returns true. /// Note that the result is a 1-based as that is what code model expects. Returns -1 if no match is found. /// </summary> protected abstract int GetMemberIndexInContainer( SyntaxNode containerNode, Func<SyntaxNode, bool> predicate); /// <summary> /// The position argument is a VARIANT which may be an EnvDTE.CodeElement, an int or a string /// representing the name of a member. 
This function translates the argument and returns the /// 1-based position of the specified member. /// </summary> public int PositionVariantToMemberInsertionIndex(object position, SyntaxNode containerNode, FileCodeModel fileCodeModel) { return PositionVariantToInsertionIndex( position, containerNode, fileCodeModel, GetMemberIndexInContainer, n => GetMemberNodes(n, includeSelf: false, recursive: false, logicalFields: false, onlySupportedNodes: false)); } private int PositionVariantToInsertionIndex( object position, SyntaxNode containerNode, FileCodeModel fileCodeModel, Func<SyntaxNode, Func<SyntaxNode, bool>, int> getIndexInContainer, Func<SyntaxNode, IEnumerable<SyntaxNode>> getChildNodes) { int result; if (position is int) { result = (int)position; } else if (position is EnvDTE.CodeElement) { var codeElement = ComAggregate.TryGetManagedObject<AbstractCodeElement>(position); if (codeElement == null || codeElement.FileCodeModel != fileCodeModel) { throw Exceptions.ThrowEInvalidArg(); } var positionNode = codeElement.LookupNode(); if (positionNode == null) { throw Exceptions.ThrowEFail(); } result = getIndexInContainer(containerNode, child => child == positionNode); } else if (position is string) { var name = (string)position; result = getIndexInContainer(containerNode, child => GetName(child) == name); } else if (position == null || position == Type.Missing) { result = 0; } else { // Nothing we can handle... throw Exceptions.ThrowEInvalidArg(); } // -1 means to insert at the end, so we'll return the last child. return result == -1 ? getChildNodes(containerNode).ToArray().Length : result; } protected abstract SyntaxNode GetFieldFromVariableNode(SyntaxNode variableNode); protected abstract SyntaxNode GetVariableFromFieldNode(SyntaxNode fieldNode); protected abstract SyntaxNode GetAttributeFromAttributeDeclarationNode(SyntaxNode attributeDeclarationNode); protected void GetNodesAroundInsertionIndex<TSyntaxNode>( TSyntaxNode containerNode, int childIndexToInsertAfter, out TSyntaxNode insertBeforeNode, out TSyntaxNode insertAfterNode) where TSyntaxNode : SyntaxNode { var childNodes = GetLogicalMemberNodes(containerNode).ToArray(); // Note: childIndexToInsertAfter is 1-based but can be 0, meaning insert before any other members. // If it isn't 0, it means to insert the member node *after* the node at the 1-based index. Debug.Assert(childIndexToInsertAfter >= 0 && childIndexToInsertAfter <= childNodes.Length); // Initialize the nodes that we'll insert the new member before and after. insertBeforeNode = null; insertAfterNode = null; if (childIndexToInsertAfter == 0) { if (childNodes.Length > 0) { insertBeforeNode = (TSyntaxNode)childNodes[0]; } } else { insertAfterNode = (TSyntaxNode)childNodes[childIndexToInsertAfter - 1]; if (childIndexToInsertAfter < childNodes.Length) { insertBeforeNode = (TSyntaxNode)childNodes[childIndexToInsertAfter]; } } if (insertBeforeNode != null) { insertBeforeNode = (TSyntaxNode)GetFieldFromVariableNode(insertBeforeNode); } if (insertAfterNode != null) { insertAfterNode = (TSyntaxNode)GetFieldFromVariableNode(insertAfterNode); } } private int GetMemberInsertionIndex(SyntaxNode container, int insertionIndex) { var childNodes = GetLogicalMemberNodes(container).ToArray(); // Note: childIndexToInsertAfter is 1-based but can be 0, meaning insert before any other members. // If it isn't 0, it means to insert the member node *after* the node at the 1-based index. 
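// For example, if the container declares "int x, y;", the logical member list contains a
// declarator for x and one for y; GetFieldFromVariableNode maps either declarator back to the
// same field declaration, so insertionIndex values 1 and 2 both resolve to that field's
// 1-based position in the non-logical member list computed below.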
Debug.Assert(insertionIndex >= 0 && insertionIndex <= childNodes.Length); if (insertionIndex == 0) { return 0; } else { var nodeAtIndex = GetFieldFromVariableNode(childNodes[insertionIndex - 1]); return GetMemberNodes(container, includeSelf: false, recursive: false, logicalFields: false, onlySupportedNodes: false).ToList().IndexOf(nodeAtIndex) + 1; } } private int GetAttributeArgumentInsertionIndex(SyntaxNode container, int insertionIndex) { return insertionIndex; } private int GetAttributeInsertionIndex(SyntaxNode container, int insertionIndex) { return insertionIndex; } private int GetImportInsertionIndex(SyntaxNode container, int insertionIndex) { return insertionIndex; } private int GetParameterInsertionIndex(SyntaxNode container, int insertionIndex) { return insertionIndex; } protected abstract bool IsCodeModelNode(SyntaxNode node); protected abstract TextSpan GetSpanToFormat(SyntaxNode root, TextSpan span); protected abstract SyntaxNode InsertMemberNodeIntoContainer(int index, SyntaxNode member, SyntaxNode container); protected abstract SyntaxNode InsertAttributeArgumentIntoContainer(int index, SyntaxNode attributeArgument, SyntaxNode container); protected abstract SyntaxNode InsertAttributeListIntoContainer(int index, SyntaxNode attribute, SyntaxNode container); protected abstract SyntaxNode InsertImportIntoContainer(int index, SyntaxNode import, SyntaxNode container); protected abstract SyntaxNode InsertParameterIntoContainer(int index, SyntaxNode parameter, SyntaxNode container); private Document FormatAnnotatedNode(Document document, SyntaxAnnotation annotation, IEnumerable<IFormattingRule> additionalRules, CancellationToken cancellationToken) { var root = document.GetSyntaxRootAsync(cancellationToken).WaitAndGetResult_CodeModel(cancellationToken); var annotatedNode = root.GetAnnotatedNodesAndTokens(annotation).Single().AsNode(); var formattingSpan = GetSpanToFormat(root, annotatedNode.FullSpan); var formattingRules = Formatter.GetDefaultFormattingRules(document); if (additionalRules != null) { formattingRules = additionalRules.Concat(formattingRules); } return Formatter.FormatAsync( document, new TextSpan[] { formattingSpan }, options: null, rules: formattingRules, cancellationToken: cancellationToken).WaitAndGetResult_CodeModel(cancellationToken); } private SyntaxNode InsertNode( Document document, bool batchMode, int insertionIndex, SyntaxNode containerNode, SyntaxNode node, Func<int, SyntaxNode, SyntaxNode, SyntaxNode> insertNodeIntoContainer, CancellationToken cancellationToken, out Document newDocument) { var root = document.GetSyntaxRootAsync(cancellationToken).WaitAndGetResult_CodeModel(cancellationToken); // Annotate the member we're inserting so we can get back to it. var annotation = new SyntaxAnnotation(); var gen = SyntaxGenerator.GetGenerator(document); node = node.WithAdditionalAnnotations(annotation); if (gen.GetDeclarationKind(node) != DeclarationKind.NamespaceImport) { // REVIEW: how simplifier ever worked for code model? nobody added simplifier.Annotation before? 
node = node.WithAdditionalAnnotations(Simplifier.Annotation); } var newContainerNode = insertNodeIntoContainer(insertionIndex, node, containerNode); var newRoot = root.ReplaceNode(containerNode, newContainerNode); document = document.WithSyntaxRoot(newRoot); if (!batchMode) { document = Simplifier.ReduceAsync(document, annotation, optionSet: null, cancellationToken: cancellationToken).WaitAndGetResult_CodeModel(cancellationToken); } document = FormatAnnotatedNode(document, annotation, new[] { _lineAdjustmentFormattingRule, _endRegionFormattingRule }, cancellationToken); // out param newDocument = document; // new node return document .GetSyntaxRootAsync(cancellationToken).WaitAndGetResult_CodeModel(cancellationToken) .GetAnnotatedNodesAndTokens(annotation) .Single() .AsNode(); } /// <summary> /// Override to determine whether <param name="newNode"/> adds a method body to <param name="node"/>. /// This is used to determine whether a blank line should be added inside the body when formatting. /// </summary> protected abstract bool AddBlankLineToMethodBody(SyntaxNode node, SyntaxNode newNode); public Document UpdateNode( Document document, SyntaxNode node, SyntaxNode newNode, CancellationToken cancellationToken) { // Annotate the member we're inserting so we can get back to it. var annotation = new SyntaxAnnotation(); // REVIEW: how simplifier ever worked for code model? nobody added simplifier.Annotation before? var annotatedNode = newNode.WithAdditionalAnnotations(annotation, Simplifier.Annotation); var oldRoot = document.GetSyntaxRootAsync(cancellationToken).WaitAndGetResult_CodeModel(cancellationToken); var newRoot = oldRoot.ReplaceNode(node, annotatedNode); document = document.WithSyntaxRoot(newRoot); var additionalRules = AddBlankLineToMethodBody(node, newNode) ? 
SpecializedCollections.SingletonEnumerable(_lineAdjustmentFormattingRule) : null; document = FormatAnnotatedNode(document, annotation, additionalRules, cancellationToken); return document; } public SyntaxNode InsertAttribute( Document document, bool batchMode, int insertionIndex, SyntaxNode containerNode, SyntaxNode attributeNode, CancellationToken cancellationToken, out Document newDocument) { var finalNode = InsertNode( document, batchMode, GetAttributeInsertionIndex(containerNode, insertionIndex), containerNode, attributeNode, InsertAttributeListIntoContainer, cancellationToken, out newDocument); return GetAttributeFromAttributeDeclarationNode(finalNode); } public SyntaxNode InsertAttributeArgument( Document document, bool batchMode, int insertionIndex, SyntaxNode containerNode, SyntaxNode attributeArgumentNode, CancellationToken cancellationToken, out Document newDocument) { var finalNode = InsertNode( document, batchMode, GetAttributeArgumentInsertionIndex(containerNode, insertionIndex), containerNode, attributeArgumentNode, InsertAttributeArgumentIntoContainer, cancellationToken, out newDocument); return finalNode; } public SyntaxNode InsertImport( Document document, bool batchMode, int insertionIndex, SyntaxNode containerNode, SyntaxNode importNode, CancellationToken cancellationToken, out Document newDocument) { var finalNode = InsertNode( document, batchMode, GetImportInsertionIndex(containerNode, insertionIndex), containerNode, importNode, InsertImportIntoContainer, cancellationToken, out newDocument); return finalNode; } public SyntaxNode InsertParameter( Document document, bool batchMode, int insertionIndex, SyntaxNode containerNode, SyntaxNode parameterNode, CancellationToken cancellationToken, out Document newDocument) { var finalNode = InsertNode( document, batchMode, GetParameterInsertionIndex(containerNode, insertionIndex), containerNode, parameterNode, InsertParameterIntoContainer, cancellationToken, out newDocument); return finalNode; } public SyntaxNode InsertMember( Document document, bool batchMode, int insertionIndex, SyntaxNode containerNode, SyntaxNode memberNode, CancellationToken cancellationToken, out Document newDocument) { var finalNode = InsertNode( document, batchMode, GetMemberInsertionIndex(containerNode, insertionIndex), containerNode, memberNode, InsertMemberNodeIntoContainer, cancellationToken, out newDocument); return GetVariableFromFieldNode(finalNode); } public Queue<CodeModelEvent> CollectCodeModelEvents(SyntaxTree oldTree, SyntaxTree newTree) { return _eventCollector.Collect(oldTree, newTree); } public abstract bool IsNamespace(SyntaxNode node); public abstract bool IsType(SyntaxNode node); public virtual IList<string> GetHandledEventNames(SyntaxNode method, SemanticModel semanticModel) { // descendants may override (particularly VB). return SpecializedCollections.EmptyList<string>(); } public virtual bool HandlesEvent(string eventName, SyntaxNode method, SemanticModel semanticModel) { // descendants may override (particularly VB). return false; } public virtual Document AddHandlesClause(Document document, string eventName, SyntaxNode method, CancellationToken cancellationToken) { // descendants may override (particularly VB). return document; } public virtual Document RemoveHandlesClause(Document document, string eventName, SyntaxNode method, CancellationToken cancellationToken) { // descendants may override (particularly VB). 
            return document;
        }

        public abstract string[] GetFunctionExtenderNames();
        public abstract object GetFunctionExtender(string name, SyntaxNode node, ISymbol symbol);
        public abstract string[] GetPropertyExtenderNames();
        public abstract object GetPropertyExtender(string name, SyntaxNode node, ISymbol symbol);
        public abstract string[] GetExternalTypeExtenderNames();
        public abstract object GetExternalTypeExtender(string name, string externalLocation);
        public abstract string[] GetTypeExtenderNames();
        public abstract object GetTypeExtender(string name, AbstractCodeType codeType);

        public abstract bool IsValidBaseType(SyntaxNode node, ITypeSymbol typeSymbol);
        public abstract SyntaxNode AddBase(SyntaxNode node, ITypeSymbol typeSymbol, SemanticModel semanticModel, int? position);
        public abstract SyntaxNode RemoveBase(SyntaxNode node, ITypeSymbol typeSymbol, SemanticModel semanticModel);

        public abstract bool IsValidInterfaceType(SyntaxNode node, ITypeSymbol typeSymbol);
        public abstract SyntaxNode AddImplementedInterface(SyntaxNode node, ITypeSymbol typeSymbol, SemanticModel semanticModel, int? position);
        public abstract SyntaxNode RemoveImplementedInterface(SyntaxNode node, ITypeSymbol typeSymbol, SemanticModel semanticModel);

        public abstract string GetPrototype(SyntaxNode node, ISymbol symbol, PrototypeFlags flags);

        public virtual void AttachFormatTrackingToBuffer(ITextBuffer buffer)
        {
            // can be overridden by languages if needed
        }

        public virtual void DetachFormatTrackingToBuffer(ITextBuffer buffer)
        {
            // can be overridden by languages if needed
        }

        public virtual void EnsureBufferFormatted(ITextBuffer buffer)
        {
            // can be overridden by languages if needed
        }
    }
}
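// Illustrative sketch (not part of the service above; the AccessFlagDemo name is made up for
// this example). It shows why GetAccessibility has to test vsCMAccessProjectOrProtected before
// its component flags: the combined value (12) carries both the vsCMAccessProject (4) and
// vsCMAccessProtected (8) bits, so checking either component first would report Internal or
// Protected instead of ProtectedOrInternal. Assumes a reference to the EnvDTE interop assembly.
using System;

internal static class AccessFlagDemo
{
    private static void Main()
    {
        var access = EnvDTE.vsCMAccess.vsCMAccessProjectOrProtected;

        // Both component bits are set on the combined flag value...
        Console.WriteLine((access & EnvDTE.vsCMAccess.vsCMAccessProject) != 0);   // True
        Console.WriteLine((access & EnvDTE.vsCMAccess.vsCMAccessProtected) != 0); // True

        // ...so the combined flag has to be matched first to map the request to
        // Accessibility.ProtectedOrInternal, which is why GetAccessibility checks it first.
        var mapsToProtectedOrInternal =
            (access & EnvDTE.vsCMAccess.vsCMAccessProjectOrProtected) == EnvDTE.vsCMAccess.vsCMAccessProjectOrProtected;
        Console.WriteLine(mapsToProtectedOrInternal); // True
    }
}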
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Data; using System.Reflection; using System.Collections.Generic; using log4net; using Mono.Data.SqliteClient; using OpenMetaverse; using OpenSim.Framework; namespace OpenSim.Data.SQLite { /// <summary> /// An asset storage interface for the SQLite database system /// </summary> public class SQLiteAssetData : AssetDataBase { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private const string SelectAssetSQL = "select * from assets where UUID=:UUID"; private const string SelectAssetMetadataSQL = "select Name, Description, Type, Temporary, UUID from assets limit :start, :count"; private const string DeleteAssetSQL = "delete from assets where UUID=:UUID"; private const string InsertAssetSQL = "insert into assets(UUID, Name, Description, Type, Local, Temporary, Data) values(:UUID, :Name, :Description, :Type, :Local, :Temporary, :Data)"; private const string UpdateAssetSQL = "update assets set Name=:Name, Description=:Description, Type=:Type, Local=:Local, Temporary=:Temporary, Data=:Data where UUID=:UUID"; private const string assetSelect = "select * from assets"; private SqliteConnection m_conn; override public void Dispose() { if (m_conn != null) { m_conn.Close(); m_conn = null; } } /// <summary> /// <list type="bullet"> /// <item>Initialises AssetData interface</item> /// <item>Loads and initialises a new SQLite connection and maintains it.</item> /// <item>use default URI if connect string is empty.</item> /// </list> /// </summary> /// <param name="dbconnect">connect string</param> override public void Initialise(string dbconnect) { if (dbconnect == string.Empty) { dbconnect = "URI=file:Asset.db,version=3"; } m_conn = new SqliteConnection(dbconnect); m_conn.Open(); Assembly assem = GetType().Assembly; Migration m = new Migration(m_conn, assem, "AssetStore"); m.Update(); return; } /// <summary> /// Fetch Asset /// </summary> /// <param name="uuid">UUID of ... 
?</param> /// <returns>Asset base</returns> override public AssetBase GetAsset(UUID uuid) { lock (this) { using (SqliteCommand cmd = new SqliteCommand(SelectAssetSQL, m_conn)) { cmd.Parameters.Add(new SqliteParameter(":UUID", uuid.ToString())); using (IDataReader reader = cmd.ExecuteReader()) { if (reader.Read()) { AssetBase asset = buildAsset(reader); reader.Close(); return asset; } else { reader.Close(); return null; } } } } } /// <summary> /// Create an asset /// </summary> /// <param name="asset">Asset Base</param> override public void StoreAsset(AssetBase asset) { //m_log.Info("[ASSET DB]: Creating Asset " + asset.FullID.ToString()); if (ExistsAsset(asset.FullID)) { LogAssetLoad(asset); lock (this) { using (SqliteCommand cmd = new SqliteCommand(UpdateAssetSQL, m_conn)) { cmd.Parameters.Add(new SqliteParameter(":UUID", asset.FullID.ToString())); cmd.Parameters.Add(new SqliteParameter(":Name", asset.Name)); cmd.Parameters.Add(new SqliteParameter(":Description", asset.Description)); cmd.Parameters.Add(new SqliteParameter(":Type", asset.Type)); cmd.Parameters.Add(new SqliteParameter(":Local", asset.Local)); cmd.Parameters.Add(new SqliteParameter(":Temporary", asset.Temporary)); cmd.Parameters.Add(new SqliteParameter(":Data", asset.Data)); cmd.ExecuteNonQuery(); } } } else { lock (this) { using (SqliteCommand cmd = new SqliteCommand(InsertAssetSQL, m_conn)) { cmd.Parameters.Add(new SqliteParameter(":UUID", asset.FullID.ToString())); cmd.Parameters.Add(new SqliteParameter(":Name", asset.Name)); cmd.Parameters.Add(new SqliteParameter(":Description", asset.Description)); cmd.Parameters.Add(new SqliteParameter(":Type", asset.Type)); cmd.Parameters.Add(new SqliteParameter(":Local", asset.Local)); cmd.Parameters.Add(new SqliteParameter(":Temporary", asset.Temporary)); cmd.Parameters.Add(new SqliteParameter(":Data", asset.Data)); cmd.ExecuteNonQuery(); } } } } /// <summary> /// Some... logging functionnality /// </summary> /// <param name="asset"></param> private static void LogAssetLoad(AssetBase asset) { string temporary = asset.Temporary ? "Temporary" : "Stored"; string local = asset.Local ? "Local" : "Remote"; int assetLength = (asset.Data != null) ? asset.Data.Length : 0; m_log.Info("[ASSET DB]: " + string.Format("Loaded {5} {4} Asset: [{0}][{3}] \"{1}\":{2} ({6} bytes)", asset.FullID, asset.Name, asset.Description, asset.Type, temporary, local, assetLength)); } /// <summary> /// Check if an asset exist in database /// </summary> /// <param name="uuid">The asset UUID</param> /// <returns>True if exist, or false.</returns> override public bool ExistsAsset(UUID uuid) { lock (this) { using (SqliteCommand cmd = new SqliteCommand(SelectAssetSQL, m_conn)) { cmd.Parameters.Add(new SqliteParameter(":UUID", uuid.ToString())); using (IDataReader reader = cmd.ExecuteReader()) { if (reader.Read()) { reader.Close(); return true; } else { reader.Close(); return false; } } } } } /// <summary> /// Delete an asset from database /// </summary> /// <param name="uuid"></param> public void DeleteAsset(UUID uuid) { using (SqliteCommand cmd = new SqliteCommand(DeleteAssetSQL, m_conn)) { cmd.Parameters.Add(new SqliteParameter(":UUID", uuid.ToString())); cmd.ExecuteNonQuery(); } } /// <summary> /// /// </summary> /// <param name="row"></param> /// <returns></returns> private static AssetBase buildAsset(IDataReader row) { // TODO: this doesn't work yet because something more // interesting has to be done to actually get these values // back out. Not enough time to figure it out yet. 
AssetBase asset = new AssetBase(); asset.FullID = new UUID((String) row["UUID"]); asset.Name = (String) row["Name"]; asset.Description = (String) row["Description"]; asset.Type = Convert.ToSByte(row["Type"]); asset.Local = Convert.ToBoolean(row["Local"]); asset.Temporary = Convert.ToBoolean(row["Temporary"]); asset.Data = (byte[]) row["Data"]; return asset; } private static AssetMetadata buildAssetMetadata(IDataReader row) { AssetMetadata metadata = new AssetMetadata(); metadata.FullID = new UUID((string) row["UUID"]); metadata.Name = (string) row["Name"]; metadata.Description = (string) row["Description"]; metadata.Type = Convert.ToSByte(row["Type"]); metadata.Temporary = Convert.ToBoolean(row["Temporary"]); // Not sure if this is correct. // Current SHA1s are not stored/computed. metadata.SHA1 = new byte[] {}; return metadata; } /// <summary> /// Returns a list of AssetMetadata objects. The list is a subset of /// the entire data set offset by <paramref name="start" /> containing /// <paramref name="count" /> elements. /// </summary> /// <param name="start">The number of results to discard from the total data set.</param> /// <param name="count">The number of rows the returned list should contain.</param> /// <returns>A list of AssetMetadata objects.</returns> public override List<AssetMetadata> FetchAssetMetadataSet(int start, int count) { List<AssetMetadata> retList = new List<AssetMetadata>(count); lock (this) { using (SqliteCommand cmd = new SqliteCommand(SelectAssetMetadataSQL, m_conn)) { cmd.Parameters.Add(new SqliteParameter(":start", start)); cmd.Parameters.Add(new SqliteParameter(":count", count)); using (IDataReader reader = cmd.ExecuteReader()) { while (reader.Read()) { AssetMetadata metadata = buildAssetMetadata(reader); retList.Add(metadata); } } } } return retList; } /*********************************************************************** * * Database Binding functions * * These will be db specific due to typing, and minor differences * in databases. * **********************************************************************/ #region IPlugin interface /// <summary> /// /// </summary> override public string Version { get { Module module = GetType().Module; // string dllName = module.Assembly.ManifestModule.Name; Version dllVersion = module.Assembly.GetName().Version; return string.Format("{0}.{1}.{2}.{3}", dllVersion.Major, dllVersion.Minor, dllVersion.Build, dllVersion.Revision); } } /// <summary> /// Initialise the AssetData interface using default URI /// </summary> override public void Initialise() { Initialise("URI=file:Asset.db,version=3"); } /// <summary> /// Name of this DB provider /// </summary> override public string Name { get { return "SQLite Asset storage engine"; } } #endregion } }
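// Illustrative usage sketch for the SQLiteAssetData plugin above (not part of the plugin; the
// SQLiteAssetDataSample name and the sample asset values are made up for this example). It
// assumes the OpenSim.Data.SQLite, OpenSim.Framework and OpenMetaverse assemblies are referenced,
// and reuses the default connection string from Initialise(). Note that StoreAsset issues
// UpdateAssetSQL when ExistsAsset reports the UUID is already present, and InsertAssetSQL otherwise.
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Data.SQLite;

public static class SQLiteAssetDataSample
{
    public static void Run()
    {
        var assetData = new SQLiteAssetData();
        assetData.Initialise("URI=file:Asset.db,version=3");

        // Build a small in-memory asset and persist it.
        var asset = new AssetBase();
        asset.FullID = UUID.Random();
        asset.Name = "Sample asset";
        asset.Description = "Created by the usage sketch";
        asset.Type = 0;               // 0 is only a placeholder type code for this sketch
        asset.Local = false;
        asset.Temporary = false;
        asset.Data = new byte[] { 0x00, 0x01, 0x02 };
        assetData.StoreAsset(asset);

        // Read it back and page through stored metadata.
        AssetBase fetched = assetData.GetAsset(asset.FullID);
        var firstPage = assetData.FetchAssetMetadataSet(0, 10);

        assetData.Dispose();
    }
}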
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.Azure.AcceptanceTestsPaging { using System.Linq; using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; /// <summary> /// Long-running Operation for AutoRest /// </summary> public partial class AutoRestPagingTestService : Microsoft.Rest.ServiceClient<AutoRestPagingTestService>, IAutoRestPagingTestService, IAzureClient { /// <summary> /// The base URI of the service. /// </summary> public System.Uri BaseUri { get; set; } /// <summary> /// Gets or sets json serialization settings. /// </summary> public Newtonsoft.Json.JsonSerializerSettings SerializationSettings { get; private set; } /// <summary> /// Gets or sets json deserialization settings. /// </summary> public Newtonsoft.Json.JsonSerializerSettings DeserializationSettings { get; private set; } /// <summary> /// Credentials needed for the client to connect to Azure. /// </summary> public Microsoft.Rest.ServiceClientCredentials Credentials { get; private set; } /// <summary> /// Gets or sets the preferred language for the response. /// </summary> public string AcceptLanguage { get; set; } /// <summary> /// Gets or sets the retry timeout in seconds for Long Running Operations. /// Default value is 30. /// </summary> public int? LongRunningOperationRetryTimeout { get; set; } /// <summary> /// When set to true a unique x-ms-client-request-id value is generated and /// included in each request. Default is true. /// </summary> public bool? GenerateClientRequestId { get; set; } /// <summary> /// Gets the IPagingOperations. /// </summary> public virtual IPagingOperations Paging { get; private set; } /// <summary> /// Initializes a new instance of the AutoRestPagingTestService class. /// </summary> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected AutoRestPagingTestService(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers) { this.Initialize(); } /// <summary> /// Initializes a new instance of the AutoRestPagingTestService class. /// </summary> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected AutoRestPagingTestService(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers) { this.Initialize(); } /// <summary> /// Initializes a new instance of the AutoRestPagingTestService class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> protected AutoRestPagingTestService(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } this.BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the AutoRestPagingTestService class. /// </summary> /// <param name='baseUri'> /// Optional. 
The base URI of the service. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> protected AutoRestPagingTestService(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } this.BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the AutoRestPagingTestService class. /// </summary> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public AutoRestPagingTestService(Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers) { if (credentials == null) { throw new System.ArgumentNullException("credentials"); } this.Credentials = credentials; if (this.Credentials != null) { this.Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the AutoRestPagingTestService class. /// </summary> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public AutoRestPagingTestService(Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (credentials == null) { throw new System.ArgumentNullException("credentials"); } this.Credentials = credentials; if (this.Credentials != null) { this.Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the AutoRestPagingTestService class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public AutoRestPagingTestService(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } if (credentials == null) { throw new System.ArgumentNullException("credentials"); } this.BaseUri = baseUri; this.Credentials = credentials; if (this.Credentials != null) { this.Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the AutoRestPagingTestService class. 
/// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public AutoRestPagingTestService(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } if (credentials == null) { throw new System.ArgumentNullException("credentials"); } this.BaseUri = baseUri; this.Credentials = credentials; if (this.Credentials != null) { this.Credentials.InitializeServiceClient(this); } } /// <summary> /// An optional partial-method to perform custom initialization. /// </summary> partial void CustomInitialize(); /// <summary> /// Initializes client properties. /// </summary> private void Initialize() { this.Paging = new PagingOperations(this); this.BaseUri = new System.Uri("http://localhost"); this.AcceptLanguage = "en-US"; this.LongRunningOperationRetryTimeout = 30; this.GenerateClientRequestId = true; SerializationSettings = new Newtonsoft.Json.JsonSerializerSettings { Formatting = Newtonsoft.Json.Formatting.Indented, DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc, NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore, ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize, ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(), Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter> { new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter() } }; DeserializationSettings = new Newtonsoft.Json.JsonSerializerSettings { DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc, NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore, ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize, ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(), Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter> { new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter() } }; CustomInitialize(); DeserializationSettings.Converters.Add(new Microsoft.Rest.Azure.CloudErrorJsonConverter()); } } }
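// Illustrative sketch showing how the generated AutoRestPagingTestService client above is
// typically constructed (not part of the generated code; the PagingClientSample name, the token
// value and the URL are placeholders). TokenCredentials is one concrete
// Microsoft.Rest.ServiceClientCredentials implementation from the client runtime.
using System;
using Fixtures.Azure.AcceptanceTestsPaging;
using Microsoft.Rest;

public static class PagingClientSample
{
    public static void Run()
    {
        var credentials = new TokenCredentials("<access-token>");

        // The (baseUri, credentials) overload used here is defined in the class above;
        // Initialize() already defaults BaseUri to http://localhost, so only override it when needed.
        using (var client = new AutoRestPagingTestService(new Uri("http://localhost:3000"), credentials))
        {
            client.AcceptLanguage = "en-US";
            client.LongRunningOperationRetryTimeout = 0; // shorten LRO polling delays for local test runs
            client.GenerateClientRequestId = true;

            // Paging exposes the generated IPagingOperations; its members are produced by
            // AutoRest from the test swagger and are invoked through this property.
            IPagingOperations paging = client.Paging;
        }
    }
}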
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. using System; using System.Net.Http; using System.Threading.Tasks; using AngleSharp.Dom.Html; using Identity.DefaultUI.WebSite; using Microsoft.AspNetCore.Identity.FunctionalTests.Account; using Microsoft.AspNetCore.Identity.FunctionalTests.Account.Manage; using Newtonsoft.Json; using Newtonsoft.Json.Linq; using Xunit; namespace Microsoft.AspNetCore.Identity.FunctionalTests { public class UserStories { internal static async Task<Index> RegisterNewUserAsync(HttpClient client, string userName = null, string password = null) { userName = userName ?? $"{Guid.NewGuid()}@example.com"; password = password ?? $"!Test.Password1$"; var index = await Index.CreateAsync(client); var register = await index.ClickRegisterLinkAsync(); return await register.SubmitRegisterFormForValidUserAsync(userName, password); } internal static async Task<RegisterConfirmation> RegisterNewUserAsyncWithConfirmation(HttpClient client, string userName = null, string password = null, bool hasRealEmailSender = false) { userName = userName ?? $"{Guid.NewGuid()}@example.com"; password = password ?? $"!Test.Password1$"; var index = await Index.CreateAsync(client); var register = await index.ClickRegisterLinkAsync(); return await register.SubmitRegisterFormWithConfirmation(userName, password, hasRealEmailSender); } internal static async Task<Index> LoginExistingUserAsync(HttpClient client, string userName, string password) { var index = await Index.CreateAsync(client); var login = await index.ClickLoginLinkAsync(); return await login.LoginValidUserAsync(userName, password); } internal static async Task LoginFailsWithWrongPasswordAsync(HttpClient client, string userName, string password) { var index = await Index.CreateAsync(client); var login = await index.ClickLoginLinkAsync(); await login.LoginWrongPasswordAsync(userName, password); } internal static async Task<DefaultUIPage> LockoutExistingUserAsync(HttpClient client, string userName, string password) { var index = await Index.CreateAsync(client); var login = await index.ClickLoginLinkAsync(); return await login.LockoutUserAsync(userName, password); } // This is via login page internal static async Task<Index> RegisterNewUserWithSocialLoginAsync(HttpClient client, string userName, string email) { var index = await Index.CreateAsync(client, new DefaultUIContext().WithSocialLoginEnabled()); var login = await index.ClickLoginLinkAsync(); var contosoLogin = await login.ClickLoginWithContosoLinkAsync(); var externalLogin = await contosoLogin.SendNewUserNameAsync(userName); return await externalLogin.SendEmailAsync(email); } internal static async Task<RegisterConfirmation> RegisterNewUserWithSocialLoginWithConfirmationAsync(HttpClient client, string userName, string email, bool hasRealEmailSender = false) { var index = await Index.CreateAsync(client, new DefaultUIContext().WithSocialLoginEnabled()); var login = await index.ClickLoginLinkAsync(); var contosoLogin = await login.ClickLoginWithContosoLinkAsync(); var externalLogin = await contosoLogin.SendNewUserNameAsync(userName); return await externalLogin.SendEmailWithConfirmationAsync(email, hasRealEmailSender); } internal static async Task<Index> RegisterNewUserWithSocialLoginAsyncViaRegisterPage(HttpClient client, string userName, string email) { var index = await Index.CreateAsync(client, new DefaultUIContext().WithSocialLoginEnabled()); var register = await index.ClickRegisterLinkAsync(); var 
contosoLogin = await register.ClickLoginWithContosoLinkAsync(); var externalLogin = await contosoLogin.SendNewUserNameAsync(userName); return await externalLogin.SendEmailAsync(email); } internal static async Task<Email> SendEmailConfirmationLinkAsync(Index index) { var manage = await index.ClickManageLinkAsync(); var email = await manage.ClickEmailLinkAsync(); return await email.SendConfirmationEmailAsync(); } internal static async Task<Email> SendUpdateEmailAsync(Index index, string newEmail) { var manage = await index.ClickManageLinkAsync(); var email = await manage.ClickEmailLinkAsync(); return await email.SendUpdateEmailAsync(newEmail); } internal static async Task<Index> LoginWithSocialLoginAsync(HttpClient client, string userName) { var index = await Index.CreateAsync( client, new DefaultUIContext() .WithSocialLoginEnabled() .WithExistingUser()); var login = await index.ClickLoginLinkAsync(); var contosoLogin = await login.ClickLoginWithContosoLinkAsync(); return await contosoLogin.SendExistingUserNameAsync(userName); } internal static async Task<Index> LoginExistingUser2FaAsync(HttpClient client, string userName, string password, string twoFactorKey) { var index = await Index.CreateAsync(client); var loginWithPassword = await index.ClickLoginLinkAsync(); var login2Fa = await loginWithPassword.PasswordLoginValidUserWith2FaAsync(userName, password); return await login2Fa.Send2FACodeAsync(twoFactorKey); } internal static async Task<ShowRecoveryCodes> EnableTwoFactorAuthentication(Index index, bool consent = true) { var manage = await index.ClickManageLinkAsync(); var twoFactor = await manage.ClickTwoFactorLinkAsync(consent); if (consent) { var enableAuthenticator = await twoFactor.ClickEnableAuthenticatorLinkAsync(); return await enableAuthenticator.SendValidCodeAsync(); } return null; } internal static async Task<ResetAuthenticator> ResetAuthenticator(Index index) { var manage = await index.ClickManageLinkAsync(); var twoFactor = await manage.ClickTwoFactorEnabledLinkAsync(); var resetAuthenticator = await twoFactor.ClickResetAuthenticatorLinkAsync(); return await resetAuthenticator.ResetAuthenticatorAsync(); } internal static async Task<Index> LoginExistingUserRecoveryCodeAsync( HttpClient client, string userName, string password, string recoveryCode) { var index = await Index.CreateAsync(client); var loginWithPassword = await index.ClickLoginLinkAsync(); var login2Fa = await loginWithPassword.PasswordLoginValidUserWith2FaAsync(userName, password); var loginRecoveryCode = await login2Fa.ClickRecoveryCodeLinkAsync(); return await loginRecoveryCode.SendRecoveryCodeAsync(recoveryCode); } internal static async Task<ConfirmEmail> ConfirmEmailAsync(IdentityEmail email, HttpClient client) { var emailBody = HtmlAssert.IsHtmlFragment(email.Body); var linkElement = HtmlAssert.HasElement("a", emailBody); var link = Assert.IsAssignableFrom<IHtmlAnchorElement>(linkElement); return await ConfirmEmail.Create(link, client, new DefaultUIContext() .WithAuthenticatedUser() .WithExistingUser() .WithConfirmedEmail()); } internal static async Task ResendConfirmEmailAsync(HttpClient client, string email) { var index = await Index.CreateAsync(client); var login = await index.ClickLoginLinkAsync(); var reconfirm = await login.ClickReconfirmEmailLinkAsync(); var response = await reconfirm.ResendAsync(email); ResponseAssert.IsOK(response); Assert.Contains("Verification email sent.", await response.Content.ReadAsStringAsync()); } internal static async Task<ForgotPasswordConfirmation> 
ForgotPasswordAsync(HttpClient client, string userName) { var index = await Index.CreateAsync(client); var login = await index.ClickLoginLinkAsync(); var forgotPassword = await login.ClickForgotPasswordLinkAsync(); return await forgotPassword.SendForgotPasswordAsync(userName); } internal static async Task<ResetPasswordConfirmation> ResetPasswordAsync(HttpClient client, IdentityEmail resetPasswordEmail, string email, string newPassword) { var emailBody = HtmlAssert.IsHtmlFragment(resetPasswordEmail.Body); var linkElement = HtmlAssert.HasElement("a", emailBody); var link = Assert.IsAssignableFrom<IHtmlAnchorElement>(linkElement); var resetPassword = await ResetPassword.CreateAsync(link, client, new DefaultUIContext().WithExistingUser()); return await resetPassword.SendNewPasswordAsync(email, newPassword); } internal static async Task<ChangePassword> ChangePasswordAsync(Index index, string oldPassword, string newPassword) { var manage = await index.ClickManageLinkAsync(); var changePassword = await manage.ClickChangePasswordLinkAsync(); return await changePassword.ChangePasswordAsync(oldPassword, newPassword); } internal static async Task<SetPassword> SetPasswordAsync(Index index, string newPassword) { var manage = await index.ClickManageLinkAsync(); var setPassword = await manage.ClickChangePasswordLinkExternalLoginAsync(); return await setPassword.SetPasswordAsync(newPassword); } internal static async Task<ManageExternalLogin> LinkExternalLoginAsync(Index index, string loginEmail) { var manage = await index.ClickManageLinkWithExternalLoginAsync(); var linkLogin = await manage.ClickLinkLoginAsync(); return await linkLogin.LinkExternalLoginAsync(loginEmail); } internal static async Task<RemoveExternalLogin> RemoveExternalLoginAsync(ManageExternalLogin manageExternalLogin, string loginEmail) { // Provide an email to link an external account to var removeLogin = await manageExternalLogin.ManageExternalLoginAsync(loginEmail); // Remove external login return await removeLogin.RemoveLoginAsync("Contoso", "Contoso"); } internal static async Task<Index> DeleteUser(Index index, string password) { var manage = await index.ClickManageLinkAsync(); var personalData = await manage.ClickPersonalDataLinkAsync(); var deleteUser = await personalData.ClickDeleteLinkAsync(); return await deleteUser.Delete(password); } internal static async Task<JObject> DownloadPersonalData(Index index, string userName) { var manage = await index.ClickManageLinkAsync(); var personalData = await manage.ClickPersonalDataLinkAsync(); var download = await personalData.SubmitDownloadForm(); ResponseAssert.IsOK(download); return JsonConvert.DeserializeObject<JObject>(await download.Content.ReadAsStringAsync()); } internal static async Task AcceptCookiePolicy(HttpClient client) { var goToPrivacy = await client.GetAsync("/Privacy"); } } }
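// Editor's note: an illustrative sketch of how the UserStories helpers above compose into an
// end-to-end scenario. It assumes it is compiled into the same test assembly (the helpers are
// internal) and that an HttpClient pointed at the Identity test server is supplied by the
// surrounding test fixture; that fixture is not shown here and is a stand-in.
using System;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Identity.FunctionalTests;

internal static class UserStoriesUsageSketch
{
    internal static async Task RegisterThenLoginAsync(HttpClient client)
    {
        var userName = $"{Guid.NewGuid()}@example.com";
        var password = "!Test.Password1$";

        // Drive the Register page for a brand-new user...
        await UserStories.RegisterNewUserAsync(client, userName, password);

        // ...then prove the same credentials work through the Login page.
        await UserStories.LoginExistingUserAsync(client, userName, password);
    }
}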
// ******************************************************************************************************** // Product Name: DotSpatial.Symbology.dll // Description: Contains the business logic for symbology layers and symbol categories. // ******************************************************************************************************** // The contents of this file are subject to the MIT License (MIT) // you may not use this file except in compliance with the License. You may obtain a copy of the License at // http://dotspatial.codeplex.com/license // // Software distributed under the License is distributed on an "AS IS" basis, WITHOUT WARRANTY OF // ANY KIND, either expressed or implied. See the License for the specific language governing rights and // limitations under the License. // // The Original Code is from MapWindow.dll version 6.0 // // The Initial Developer of this Original Code is Ted Dunsford. Created 8/25/2008 2:46:23 PM // // Contributor(s): (Open source contributors should list themselves and their modifications here). // // ******************************************************************************************************** using System.Collections.Generic; using System.ComponentModel; using System.IO; using DotSpatial.Data; using DotSpatial.Serialization; namespace DotSpatial.Symbology { /// <summary> /// GeoImageLayer /// </summary> public class ImageLayer : Layer, IImageLayer { #region Private Variables private IImageSymbolizer _symbolizer; #endregion #region Constructors /// <summary> /// Creates a blank image layer that needs to be configured later. /// </summary> public ImageLayer() { Symbolizer = new ImageSymbolizer(); } /// <summary> /// Creates a new instance of the ImageLayer by opening the specified fileName /// </summary> /// <param name="fileName"></param> public ImageLayer(string fileName) { Symbolizer = new ImageSymbolizer(); DataSet = DataManager.DefaultDataManager.OpenImage(fileName); } /// <summary> /// Creates a new instance of the ImageLayer by opening the specified fileName, relaying progress to the /// specified handler, and automatically adds the new layer to the specified container. /// </summary> /// <param name="fileName">The fileName to open</param> /// <param name="progressHandler">A ProgressHandler that can receive progress updates</param> /// <param name="container">The layer list that should contain this image layer</param> public ImageLayer(string fileName, IProgressHandler progressHandler, ICollection<ILayer> container) : base(container) { Symbolizer = new ImageSymbolizer(); DataSet = DataManager.DefaultDataManager.OpenImage(fileName, progressHandler); } /// <summary> /// Creates a new instance of the image layer by opening the specified fileName and /// relaying progress to the specified handler. 
/// </summary> /// <param name="fileName">The fileName to open</param> /// <param name="progressHandler">The progressHandler</param> public ImageLayer(string fileName, IProgressHandler progressHandler) { Symbolizer = new ImageSymbolizer(); DataSet = DataManager.DefaultDataManager.OpenImage(fileName, progressHandler); } /// <summary> /// Creates a new instance of GeoImageLayer /// </summary> public ImageLayer(IImageData baseImage) { Symbolizer = new ImageSymbolizer(); DataSet = baseImage; } /// <summary> /// Creates a new instance of a GeoImageLayer /// </summary> /// <param name="baseImage">The image to draw as a layer</param> /// <param name="container">The Layers collection that keeps track of the image layer</param> public ImageLayer(IImageData baseImage, ICollection<ILayer> container) : base(container) { Symbolizer = new ImageSymbolizer(); DataSet = baseImage; } #endregion #region IImageLayer Members /// <summary> /// Gets or sets the underlying data for this object /// </summary> [Serialize("ImageData")] public new IImageData DataSet { get { return base.DataSet as IImageData; } set { var current = DataSet; if (current == value) return; base.DataSet = value; OnDataSetChanged(value); } } protected virtual void OnDataSetChanged(IImageData value) { IsVisible = value != null; // Change legendText only if image data refers to real file if (value != null && File.Exists(value.Filename)) { LegendText = Path.GetFileName(value.Filename); } } /// <summary> /// Gets the geographic bounding envelope for the image /// </summary> public override Extent Extent { get { if (DataSet == null) return null; return DataSet.Extent; } } /// <summary> /// Gets or sets a class that has some basic parameters that control how the image layer /// is drawn. /// </summary> [Browsable(false)] [ShallowCopy] [Serialize("Symbolizer")] public IImageSymbolizer Symbolizer { get { return _symbolizer; } set { _symbolizer = value; OnItemChanged(); } } /// <summary> /// Gets or sets the image being drawn by this layer /// </summary> public IImageData Image { get { return DataSet; } set { DataSet = value; } } #endregion /// <summary> /// Gets or sets custom actions for ImageLayer /// </summary> [Browsable(false)] public IImageLayerActions ImageLayerActions { get; set; } /// <summary> /// Handles when this layer should show its properties by firing the event on the shared event sender /// </summary> /// <param name="e"></param> protected override void OnShowProperties(HandledEventArgs e) { var ila = ImageLayerActions; if (ila != null) { ila.ShowProperties(this); } } /// <summary> /// Handles export data from this layer. /// </summary> protected override void OnExportData() { var ila = ImageLayerActions; if (ila != null) { ila.ExportData(Image); } } /// <summary> /// Dispose memory objects. /// </summary> /// <param name="disposeManagedResources">True if managed memory objects should be set to null.</param> protected override void Dispose(bool disposeManagedResources) { if (disposeManagedResources) { _symbolizer = null; ImageLayerActions = null; } base.Dispose(disposeManagedResources); } } }
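// Editor's note: a minimal usage sketch for the ImageLayer class above. The caller-supplied file
// path and the "ImageLayerExamples" container are placeholders; only members that appear in this
// file (the file-name constructor, LegendText, IsVisible and Extent) are exercised.
using System;
using DotSpatial.Symbology;

internal static class ImageLayerExamples
{
    internal static ImageLayer OpenImageAsLayer(string path)
    {
        // The constructor opens the image through DataManager.DefaultDataManager.OpenImage and
        // assigns a default ImageSymbolizer, so the layer is ready to draw once it returns.
        var layer = new ImageLayer(path);

        // OnDataSetChanged has already set LegendText to the file name and made the layer visible;
        // Extent exposes the georeferenced bounds of the underlying IImageData.
        Console.WriteLine($"{layer.LegendText} visible={layer.IsVisible} extent={layer.Extent}");
        return layer;
    }
}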
// // Encog(tm) Core v3.3 - .Net Version // http://www.heatonresearch.com/encog/ // // Copyright 2008-2014 Heaton Research, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // For more information on Heaton Research copyrights, licenses // and trademarks visit: // http://www.heatonresearch.com/copyright // using System.Collections.Generic; using Encog.ML.Data; using Encog.ML.Data.Basic; using Encog.ML.HMM.Alog; using Encog.ML.HMM.Distributions; using Encog.ML.Train; using Encog.ML.Train.Strategy; using Encog.Neural.Networks.Training.Propagation; namespace Encog.ML.HMM.Train.KMeans { /// <summary> /// Train a Hidden Markov Model (HMM) with the KMeans algorithm. Makes use of /// KMeans clustering to estimate the transitional and observational /// probabilities for the HMM. /// /// Unlike Baum Welch training, this method does not require a prior estimate of /// the HMM model, it starts from scratch. /// /// Faber, Clustering and the Continuous k-Means Algorithm, Los Alamos Science, /// no. 22, 1994. /// </summary> public class TrainKMeans : IMLTrain { /// <summary> /// The clusters. /// </summary> private readonly Clusters _clusters; /// <summary> /// The HMM to use as a model. /// </summary> private readonly HiddenMarkovModel _modelHmm; /// <summary> /// The number of states. /// </summary> private readonly int _states; /// <summary> /// The training data. /// </summary> private readonly IMLSequenceSet _training; /// <summary> /// Keep track of if we are done. /// </summary> private bool _done; /// <summary> /// The current HMM. /// </summary> private HiddenMarkovModel _method; /// <summary> /// Construct a KMeans trainer. /// </summary> /// <param name="method">The HMM.</param> /// <param name="sequences">The training data.</param> public TrainKMeans(HiddenMarkovModel method, IMLSequenceSet sequences) { _method = method; _modelHmm = method; _states = method.StateCount; _training = sequences; _clusters = new Clusters(_states, sequences); _done = false; } #region IMLTrain Members /// <summary> /// The iteration number that we are currently on. /// </summary> public int IterationNumber { get; set; } /// <inheritdoc/> public void AddStrategy(IStrategy strategy) { } /// <inheritdoc/> public bool CanContinue { get { return false; } } /// <inheritdoc/> public void FinishTraining() { } /// <inheritdoc/> public double Error { get { return _done ? 
0 : 100; } set { } } /// <inheritdoc/> public TrainingImplementationType ImplementationType { get { return TrainingImplementationType.Iterative; } } /// <inheritdoc/> public IMLMethod Method { get { return _method; } } /// <inheritdoc/> public IList<IStrategy> Strategies { get { return null; } } /// <inheritdoc/> public IMLDataSet Training { get { return _training; } } /// <inheritdoc/> public bool TrainingDone { get { return _done; } } /// <inheritdoc/> public void Iteration() { HiddenMarkovModel hmm = _modelHmm.CloneStructure(); LearnPi(hmm); LearnTransition(hmm); LearnOpdf(hmm); _done = OptimizeCluster(hmm); _method = hmm; } /// <inheritdoc/> public void Iteration(int count) { for (int i = 0; i < count; i++) { Iteration(); } } /// <inheritdoc/> public TrainingContinuation Pause() { return null; } /// <inheritdoc/> public void Resume(TrainingContinuation state) { } #endregion /// <summary> /// Learn the distribution. /// </summary> /// <param name="hmm">The HMM.</param> private void LearnOpdf(HiddenMarkovModel hmm) { for (int i = 0; i < hmm.StateCount; i++) { ICollection<IMLDataPair> clusterObservations = _clusters .Cluster(i); if (clusterObservations.Count < 1) { IStateDistribution o = _modelHmm.CreateNewDistribution(); hmm.StateDistributions[i] = o; } else { var temp = new BasicMLDataSet(); foreach (IMLDataPair pair in clusterObservations) { temp.Add(pair); } hmm.StateDistributions[i].Fit(temp); } } } /// <summary> /// Learn Pi, the starting probabilities. /// </summary> /// <param name="hmm">The HMM.</param> private void LearnPi(HiddenMarkovModel hmm) { var pi = new double[_states]; for (int i = 0; i < _states; i++) { pi[i] = 0.0; } foreach (IMLDataSet sequence in _training.Sequences) { pi[_clusters.Cluster(sequence[0])]++; } for (int i = 0; i < _states; i++) { hmm.Pi[i] = pi[i]/(int) _training.Count; } } /// <summary> /// Learn the state transitions. /// </summary> /// <param name="hmm">The HMM.</param> private void LearnTransition(HiddenMarkovModel hmm) { for (int i = 0; i < hmm.StateCount; i++) { for (int j = 0; j < hmm.StateCount; j++) { hmm.TransitionProbability[i][j] = 0.0; } } foreach (IMLDataSet obsSeq in _training.Sequences) { if (obsSeq.Count < 2) { continue; } int secondState = _clusters.Cluster(obsSeq[0]); for (int i = 1; i < obsSeq.Count; i++) { int firstState = secondState; secondState = _clusters.Cluster(obsSeq[i]); hmm.TransitionProbability[firstState][secondState] = hmm.TransitionProbability[firstState][secondState] + 1.0; } } /* Normalize Aij array */ for (int i = 0; i < hmm.StateCount; i++) { double sum = 0; for (int j = 0; j < hmm.StateCount; j++) { sum += hmm.TransitionProbability[i][j]; } if (sum == 0.0) { for (int j = 0; j < hmm.StateCount; j++) { hmm.TransitionProbability[i][j] = 1.0/hmm.StateCount; } } else { for (int j = 0; j < hmm.StateCount; j++) { hmm.TransitionProbability[i][j] /= sum; } } } } /// <summary> /// Optimize the clusters. /// </summary> /// <param name="hmm">The HMM.</param> /// <returns>True if the cluster was not modified.</returns> private bool OptimizeCluster(HiddenMarkovModel hmm) { bool result = false; foreach (IMLDataSet obsSeq in _training.Sequences) { var vc = new ViterbiCalculator(obsSeq, hmm); int[] states = vc.CopyStateSequence(); for (int i = 0; i < states.Length; i++) { IMLDataPair o = obsSeq[i]; if (_clusters.Cluster(o) != states[i]) { result = true; _clusters.Remove(o, _clusters.Cluster(o)); _clusters.Put(o, states[i]); } } } return !result; } } }
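// Editor's note: a minimal training-loop sketch for the TrainKMeans class above. Construction of
// the HiddenMarkovModel and the IMLSequenceSet is assumed to happen elsewhere, and the namespaces
// in the using directives (Encog.ML.HMM for HiddenMarkovModel, Encog.ML.Data for IMLSequenceSet)
// are assumptions based on how this file resolves those types.
using Encog.ML.Data;
using Encog.ML.HMM;
using Encog.ML.HMM.Train.KMeans;

internal static class TrainKMeansUsageSketch
{
    internal static HiddenMarkovModel Train(HiddenMarkovModel model, IMLSequenceSet sequences, int maxIterations)
    {
        var trainer = new TrainKMeans(model, sequences);

        // KMeans training needs no prior model estimate; it converges once OptimizeCluster stops
        // moving observations between clusters, which flips TrainingDone to true.
        for (int i = 0; i < maxIterations && !trainer.TrainingDone; i++)
        {
            trainer.Iteration();
        }

        // Method always exposes the most recently rebuilt HMM.
        return (HiddenMarkovModel)trainer.Method;
    }
}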
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Linq; using System.Runtime.InteropServices; using System.Text; using System.Threading.Tasks; using Xunit; namespace System.Security.Tests { public static class SecureStringTests { [Theory] [InlineData(0)] [InlineData(1)] [InlineData(2)] [InlineData(ushort.MaxValue + 1)] // max allowed length public static void Ctor(int length) { string expected = CreateString(length); using (SecureString actual = CreateSecureString(expected)) { AssertEquals(expected, actual); } } [Fact] public static unsafe void Ctor_CharInt_Invalid() { Assert.Throws<ArgumentNullException>("value", () => new SecureString(null, 0)); Assert.Throws<ArgumentOutOfRangeException>("length", () => { fixed (char* chars = "test") new SecureString(chars, -1); }); Assert.Throws<ArgumentOutOfRangeException>("length", () => CreateSecureString(CreateString(ushort.MaxValue + 2 /*65537: Max allowed length is 65536*/))); } [Fact] public static void AppendChar() { using (SecureString testString = CreateSecureString(string.Empty)) { var expected = new StringBuilder(); foreach (var ch in new[] { 'a', 'b', 'c', 'd' }) { testString.AppendChar(ch); expected.Append(ch); AssertEquals(expected.ToString(), testString); } AssertEquals(expected.ToString(), testString); // check last case one more time for idempotency } } [Fact] public static void AppendChar_TooLong_Throws() { using (SecureString ss = CreateSecureString(CreateString(ushort.MaxValue + 1))) { Assert.Throws<ArgumentOutOfRangeException>("capacity", () => ss.AppendChar('a')); } } [Theory] [InlineData("")] [InlineData("test")] public static void Clear(string initialValue) { using (SecureString testString = CreateSecureString(initialValue)) { AssertEquals(initialValue, testString); testString.Clear(); AssertEquals(string.Empty, testString); } } [Fact] public static void MakeReadOnly_ReadingSucceeds_AllOtherModificationsThrow() { string initialValue = "test"; using (SecureString ss = CreateSecureString(initialValue)) { Assert.False(ss.IsReadOnly()); ss.MakeReadOnly(); Assert.True(ss.IsReadOnly()); // Reads succeed AssertEquals(initialValue, ss); Assert.Equal(initialValue.Length, ss.Length); using (SecureString other = ss.Copy()) { AssertEquals(initialValue, other); } ss.MakeReadOnly(); // ok to call again // Writes throw Assert.Throws<InvalidOperationException>(() => ss.AppendChar('a')); Assert.Throws<InvalidOperationException>(() => ss.Clear()); Assert.Throws<InvalidOperationException>(() => ss.InsertAt(0, 'a')); Assert.Throws<InvalidOperationException>(() => ss.RemoveAt(0)); Assert.Throws<InvalidOperationException>(() => ss.SetAt(0, 'a')); } } [Fact] public static void Dispose_AllOtherOperationsThrow() { SecureString ss = CreateSecureString("test"); ss.Dispose(); Assert.Throws<ObjectDisposedException>(() => ss.AppendChar('a')); Assert.Throws<ObjectDisposedException>(() => ss.Clear()); Assert.Throws<ObjectDisposedException>(() => ss.Copy()); Assert.Throws<ObjectDisposedException>(() => ss.InsertAt(0, 'a')); Assert.Throws<ObjectDisposedException>(() => ss.IsReadOnly()); Assert.Throws<ObjectDisposedException>(() => ss.Length); Assert.Throws<ObjectDisposedException>(() => ss.MakeReadOnly()); Assert.Throws<ObjectDisposedException>(() => ss.RemoveAt(0)); Assert.Throws<ObjectDisposedException>(() => ss.SetAt(0, 'a')); Assert.Throws<ObjectDisposedException>(() => 
SecureStringMarshal.SecureStringToCoTaskMemAnsi(ss)); Assert.Throws<ObjectDisposedException>(() => SecureStringMarshal.SecureStringToCoTaskMemUnicode(ss)); Assert.Throws<ObjectDisposedException>(() => SecureStringMarshal.SecureStringToGlobalAllocAnsi(ss)); Assert.Throws<ObjectDisposedException>(() => SecureStringMarshal.SecureStringToGlobalAllocUnicode(ss)); ss.Dispose(); // ok to call again } [Theory] [InlineData(0)] [InlineData(1)] [InlineData(4000)] public static void Copy(int length) { string expected = CreateString(length); using (SecureString testString = CreateSecureString(expected)) using (SecureString copyString = testString.Copy()) { Assert.False(copyString.IsReadOnly()); AssertEquals(expected, copyString); } using (SecureString testString = CreateSecureString(expected)) { testString.MakeReadOnly(); using (SecureString copyString = testString.Copy()) { Assert.False(copyString.IsReadOnly()); AssertEquals(expected, copyString); } } } [Fact] public static void InsertAt() { using (SecureString testString = CreateSecureString("bd")) { testString.InsertAt(0, 'a'); AssertEquals("abd", testString); testString.InsertAt(3, 'e'); AssertEquals("abde", testString); testString.InsertAt(2, 'c'); AssertEquals("abcde", testString); } } [Fact] public static void InsertAt_LongString() { string initialValue = CreateString(ushort.MaxValue); for (int iter = 0; iter < 2; iter++) { using (SecureString testString = CreateSecureString(initialValue)) { string expected = initialValue; AssertEquals(expected, testString); if (iter == 0) // add at the beginning { expected = 'b' + expected; testString.InsertAt(0, 'b'); } else // add at the end { expected += 'b'; testString.InsertAt(testString.Length, 'b'); } AssertEquals(expected, testString); } } } [Fact] public static void InsertAt_Invalid_Throws() { using (SecureString testString = CreateSecureString("bd")) { Assert.Throws<ArgumentOutOfRangeException>("index", () => testString.InsertAt(-1, 'S')); Assert.Throws<ArgumentOutOfRangeException>("index", () => testString.InsertAt(6, 'S')); } using (SecureString testString = CreateSecureString(CreateString(ushort.MaxValue + 1))) { Assert.Throws<ArgumentOutOfRangeException>("capacity", () => testString.InsertAt(22, 'S')); } } [Fact] public static void RemoveAt() { using (SecureString testString = CreateSecureString("abcde")) { testString.RemoveAt(3); AssertEquals("abce", testString); testString.RemoveAt(3); AssertEquals("abc", testString); testString.RemoveAt(0); AssertEquals("bc", testString); testString.RemoveAt(1); AssertEquals("b", testString); testString.RemoveAt(0); AssertEquals("", testString); testString.AppendChar('f'); AssertEquals("f", testString); testString.AppendChar('g'); AssertEquals("fg", testString); testString.RemoveAt(0); AssertEquals("g", testString); } } [Fact] public static void RemoveAt_Largest() { string expected = CreateString(ushort.MaxValue + 1); using (SecureString testString = CreateSecureString(expected)) { testString.RemoveAt(22); expected = expected.Substring(0, 22) + expected.Substring(23); AssertEquals(expected, testString); } } [Fact] public static void RemoveAt_Invalid_Throws() { using (SecureString testString = CreateSecureString("test")) { Assert.Throws<ArgumentOutOfRangeException>("index", () => testString.RemoveAt(-1)); Assert.Throws<ArgumentOutOfRangeException>("index", () => testString.RemoveAt(testString.Length)); Assert.Throws<ArgumentOutOfRangeException>("index", () => testString.RemoveAt(testString.Length + 1)); } } [Fact] public static void SetAt() { using (SecureString 
testString = CreateSecureString("abc")) { testString.SetAt(2, 'f'); AssertEquals("abf", testString); testString.SetAt(0, 'd'); AssertEquals("dbf", testString); testString.SetAt(1, 'e'); AssertEquals("def", testString); } string expected = CreateString(ushort.MaxValue + 1); using (SecureString testString = CreateSecureString(expected)) { testString.SetAt(22, 'b'); char[] chars = expected.ToCharArray(); chars[22] = 'b'; AssertEquals(new string(chars), testString); } } [Fact] public static void SetAt_Invalid_Throws() { using (SecureString testString = CreateSecureString("test")) { Assert.Throws<ArgumentOutOfRangeException>("index", () => testString.SetAt(-1, 'a')); Assert.Throws<ArgumentOutOfRangeException>("index", () => testString.SetAt(testString.Length, 'b')); Assert.Throws<ArgumentOutOfRangeException>("index", () => testString.SetAt(testString.Length + 1, 'c')); } } [Fact] public static void SecureStringMarshal_NullArgsAllowed_IntPtrZero() { Assert.Throws<ArgumentNullException>("s", () => SecureStringMarshal.SecureStringToCoTaskMemAnsi(null)); Assert.Throws<ArgumentNullException>("s", () => SecureStringMarshal.SecureStringToCoTaskMemUnicode(null)); Assert.Throws<ArgumentNullException>("s", () => SecureStringMarshal.SecureStringToGlobalAllocAnsi(null)); Assert.Throws<ArgumentNullException>("s", () => SecureStringMarshal.SecureStringToGlobalAllocUnicode(null)); } [Fact] public static void RepeatedCtorDispose() { string str = CreateString(4000); for (int i = 0; i < 1000; i++) { CreateSecureString(str).Dispose(); } } [Theory] [InlineData(0, false)] [InlineData(0, true)] [InlineData(1, false)] [InlineData(1, true)] [InlineData(2, false)] [InlineData(2, true)] [InlineData(1000, false)] [InlineData(1000, true)] public static void SecureStringMarshal_Ansi_Roundtrip(int length, bool allocHGlobal) { string input = new string(Enumerable .Range(0, length) .Select(i => (char)('a' + i)) // include non-ASCII chars .ToArray()); IntPtr marshaledString = Marshal.StringToHGlobalAnsi(input); string expectedAnsi = Marshal.PtrToStringAnsi(marshaledString); Marshal.FreeHGlobal(marshaledString); using (SecureString ss = CreateSecureString(input)) { IntPtr marshaledSecureString = allocHGlobal ? SecureStringMarshal.SecureStringToGlobalAllocAnsi(ss) : SecureStringMarshal.SecureStringToCoTaskMemAnsi(ss); string actualAnsi = Marshal.PtrToStringAnsi(marshaledSecureString); if (allocHGlobal) { Marshal.FreeHGlobal(marshaledSecureString); } else { Marshal.FreeCoTaskMem(marshaledSecureString); } Assert.Equal(expectedAnsi, actualAnsi); } } [Theory] [InlineData(0, false)] [InlineData(0, true)] [InlineData(1, false)] [InlineData(1, true)] [InlineData(2, false)] [InlineData(2, true)] [InlineData(1000, false)] [InlineData(1000, true)] public static void SecureStringMarshal_Unicode_Roundtrip(int length, bool allocHGlobal) { string input = new string(Enumerable .Range(0, length) .Select(i => (char)('a' + i)) // include non-ASCII chars .ToArray()); IntPtr marshaledString = Marshal.StringToHGlobalUni(input); string expectedAnsi = Marshal.PtrToStringUni(marshaledString); Marshal.FreeHGlobal(marshaledString); using (SecureString ss = CreateSecureString(input)) { IntPtr marshaledSecureString = allocHGlobal ? 
SecureStringMarshal.SecureStringToGlobalAllocUnicode(ss) : SecureStringMarshal.SecureStringToCoTaskMemUnicode(ss); string actualAnsi = Marshal.PtrToStringUni(marshaledSecureString); if (allocHGlobal) { Marshal.FreeHGlobal(marshaledSecureString); } else { Marshal.FreeCoTaskMem(marshaledSecureString); } Assert.Equal(expectedAnsi, actualAnsi); } } [Fact] public static void GrowAndContract_Small() { var rand = new Random(42); var sb = new StringBuilder(string.Empty); using (SecureString testString = CreateSecureString(string.Empty)) { for (int loop = 0; loop < 3; loop++) { for (int i = 0; i < 100; i++) { char c = (char)('a' + rand.Next(0, 26)); int addPos = rand.Next(0, sb.Length); testString.InsertAt(addPos, c); sb.Insert(addPos, c); AssertEquals(sb.ToString(), testString); } while (sb.Length > 0) { int removePos = rand.Next(0, sb.Length); testString.RemoveAt(removePos); sb.Remove(removePos, 1); AssertEquals(sb.ToString(), testString); } } } } [Fact] public static void Grow_Large() { string starting = CreateString(6000); var sb = new StringBuilder(starting); using (SecureString testString = CreateSecureString(starting)) { for (int i = 0; i < 4000; i++) { char c = (char)('a' + (i % 26)); testString.AppendChar(c); sb.Append(c); } AssertEquals(sb.ToString(), testString); } } [OuterLoop] [Theory] [InlineData(5)] public static void ThreadSafe_Stress(int executionTimeSeconds) // do some minimal verification that an instance can be used concurrently { using (var ss = new SecureString()) { DateTimeOffset end = DateTimeOffset.UtcNow + TimeSpan.FromSeconds(executionTimeSeconds); Task.WaitAll(Enumerable.Range(0, Environment.ProcessorCount).Select(_ => Task.Run(() => { var rand = new Random(Task.CurrentId.Value); while (DateTimeOffset.UtcNow < end) { char c = (char)rand.Next(0, char.MaxValue); switch (rand.Next(12)) { case 0: ss.AppendChar(c); break; case 1: ss.InsertAt(0, c); break; case 2: try { ss.SetAt(0, c); } catch (ArgumentOutOfRangeException) { } break; case 3: ss.Copy().Dispose(); break; case 4: Assert.InRange(ss.Length, 0, ushort.MaxValue + 1); break; case 5: ss.Clear(); break; case 6: try { ss.RemoveAt(0); } catch (ArgumentOutOfRangeException) { } break; case 7: Assert.False(ss.IsReadOnly()); break; case 8: Marshal.ZeroFreeCoTaskMemAnsi(SecureStringMarshal.SecureStringToCoTaskMemAnsi(ss)); break; case 9: Marshal.ZeroFreeCoTaskMemUnicode(SecureStringMarshal.SecureStringToCoTaskMemUnicode(ss)); break; case 10: Marshal.ZeroFreeGlobalAllocAnsi(SecureStringMarshal.SecureStringToGlobalAllocAnsi(ss)); break; case 11: Marshal.ZeroFreeGlobalAllocUnicode(SecureStringMarshal.SecureStringToGlobalAllocUnicode(ss)); break; } } })).ToArray()); } } private static unsafe void AssertEquals(string expected, SecureString actual) { Assert.Equal(expected, CreateString(actual)); } private static string CreateString(int length) { var sb = new StringBuilder(); for (int i = 0; i < length; i++) { sb.Append((char)('a' + (i % 26))); } return sb.ToString(); } // WARNING: // A key value of SecureString is in keeping string data off of the GC heap, such that it can // be reliably cleared when no longer needed. Creating a SecureString from a string or converting // a SecureString to a string diminishes that value. These conversion functions are for testing that // SecureString works, and does not represent a pattern to follow in any non-test situation. 
private static unsafe SecureString CreateSecureString(string value) { if (string.IsNullOrEmpty(value)) { return new SecureString(); } fixed (char* mychars = value.ToCharArray()) { return new SecureString(mychars, value.Length); } } private static string CreateString(SecureString value) { IntPtr ptr = SecureStringMarshal.SecureStringToGlobalAllocUnicode(value); try { return Marshal.PtrToStringUni(ptr); } finally { Marshal.ZeroFreeGlobalAllocUnicode(ptr); } } } }
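// Editor's note: the CreateSecureString/CreateString helpers above show the only sanctioned way
// to move text in and out of a SecureString: marshal to unmanaged memory, use it, then zero and
// free. The standalone sketch below repeats that read-side pattern; as the warning above states,
// converting to or from System.String defeats the point outside of test code.
using System;
using System.Runtime.InteropServices;
using System.Security;

internal static class SecureStringInteropSketch
{
    // Copies the SecureString's characters into unmanaged memory just long enough to read them,
    // then zeroes and frees the unmanaged copy (same pattern as CreateString above).
    internal static string UnprotectForTesting(SecureString value)
    {
        IntPtr ptr = IntPtr.Zero;
        try
        {
            ptr = SecureStringMarshal.SecureStringToGlobalAllocUnicode(value);
            return Marshal.PtrToStringUni(ptr);
        }
        finally
        {
            if (ptr != IntPtr.Zero)
            {
                Marshal.ZeroFreeGlobalAllocUnicode(ptr);
            }
        }
    }
}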
using J2N.Text; using Lucene.Net.Diagnostics; using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; using System.Collections.Generic; namespace Lucene.Net.Codecs.Lucene3x { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma warning disable 612, 618 internal sealed class PreFlexRWTermVectorsWriter : TermVectorsWriter { private readonly Directory directory; private readonly string segment; private IndexOutput tvx = null, tvd = null, tvf = null; public PreFlexRWTermVectorsWriter(Directory directory, string segment, IOContext context) { this.directory = directory; this.segment = segment; bool success = false; try { // Open files for TermVector storage tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), context); tvx.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT); tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context); tvd.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT); tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION), context); tvf.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT); success = true; } finally { if (!success) { Abort(); } } } public override void StartDocument(int numVectorFields) { lastFieldName = null; this.numVectorFields = numVectorFields; tvx.WriteInt64(tvd.GetFilePointer()); tvx.WriteInt64(tvf.GetFilePointer()); tvd.WriteVInt32(numVectorFields); fieldCount = 0; fps = ArrayUtil.Grow(fps, numVectorFields); } private long[] fps = new long[10]; // pointers to the tvf before writing each field private int fieldCount = 0; // number of fields we have written so far for this document private int numVectorFields = 0; // total number of fields we will write for this document private string lastFieldName; public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads) { if (Debugging.AssertsEnabled) Debugging.Assert(lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, () => "fieldName=" + info.Name + " lastFieldName=" + lastFieldName); lastFieldName = info.Name; if (payloads) { throw new NotSupportedException("3.x codec does not support payloads on vectors!"); } this.positions = positions; this.offsets = offsets; lastTerm.Length = 0; fps[fieldCount++] = tvf.GetFilePointer(); tvd.WriteVInt32(info.Number); tvf.WriteVInt32(numTerms); sbyte bits = 0x0; if (positions) { bits |= Lucene3xTermVectorsReader.STORE_POSITIONS_WITH_TERMVECTOR; } if (offsets) { bits |= Lucene3xTermVectorsReader.STORE_OFFSET_WITH_TERMVECTOR; } tvf.WriteByte((byte)bits); if (Debugging.AssertsEnabled) Debugging.Assert(fieldCount <= numVectorFields); if 
(fieldCount == numVectorFields) { // last field of the document // this is crazy because the file format is crazy! for (int i = 1; i < fieldCount; i++) { tvd.WriteVInt64(fps[i] - fps[i - 1]); } } } private readonly BytesRef lastTerm = new BytesRef(10); // NOTE: we override addProx, so we don't need to buffer when indexing. // we also don't buffer during bulk merges. private int[] offsetStartBuffer = new int[10]; private int[] offsetEndBuffer = new int[10]; private int offsetIndex = 0; private int offsetFreq = 0; private bool positions = false; private bool offsets = false; public override void StartTerm(BytesRef term, int freq) { int prefix = StringHelper.BytesDifference(lastTerm, term); int suffix = term.Length - prefix; tvf.WriteVInt32(prefix); tvf.WriteVInt32(suffix); tvf.WriteBytes(term.Bytes, term.Offset + prefix, suffix); tvf.WriteVInt32(freq); lastTerm.CopyBytes(term); lastPosition = lastOffset = 0; if (offsets && positions) { // we might need to buffer if its a non-bulk merge offsetStartBuffer = ArrayUtil.Grow(offsetStartBuffer, freq); offsetEndBuffer = ArrayUtil.Grow(offsetEndBuffer, freq); offsetIndex = 0; offsetFreq = freq; } } internal int lastPosition = 0; internal int lastOffset = 0; public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload) { if (Debugging.AssertsEnabled) Debugging.Assert(payload == null); if (positions && offsets) { // write position delta tvf.WriteVInt32(position - lastPosition); lastPosition = position; // buffer offsets offsetStartBuffer[offsetIndex] = startOffset; offsetEndBuffer[offsetIndex] = endOffset; offsetIndex++; // dump buffer if we are done if (offsetIndex == offsetFreq) { for (int i = 0; i < offsetIndex; i++) { tvf.WriteVInt32(offsetStartBuffer[i] - lastOffset); tvf.WriteVInt32(offsetEndBuffer[i] - offsetStartBuffer[i]); lastOffset = offsetEndBuffer[i]; } } } else if (positions) { // write position delta tvf.WriteVInt32(position - lastPosition); lastPosition = position; } else if (offsets) { // write offset deltas tvf.WriteVInt32(startOffset - lastOffset); tvf.WriteVInt32(endOffset - startOffset); lastOffset = endOffset; } } public override void Abort() { try { Dispose(); } #pragma warning disable 168 catch (Exception ignored) #pragma warning restore 168 { } IOUtils.DeleteFilesIgnoringExceptions(directory, IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION)); } public override void Finish(FieldInfos fis, int numDocs) { if (4 + ((long)numDocs) * 16 != tvx.GetFilePointer()) // this is most likely a bug in Sun JRE 1.6.0_04/_05; // we detect that the bug has struck, here, and // throw an exception to prevent the corruption from // entering the index. See LUCENE-1282 for // details. { throw new Exception("tvx size mismatch: mergedDocs is " + numDocs + " but tvx size is " + tvx.GetFilePointer() + " file=" + tvx.ToString() + "; now aborting this merge to prevent index corruption"); } } /// <summary> /// Close all streams. </summary> protected override void Dispose(bool disposing) { // make an effort to close all streams we can but remember and re-throw // the first exception encountered in this process IOUtils.Dispose(tvx, tvd, tvf); tvx = tvd = tvf = null; } public override IComparer<BytesRef> Comparer => BytesRef.UTF8SortedAsUTF16Comparer; } #pragma warning restore 612, 618 }
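// Editor's note: StartTerm above persists each term as (shared-prefix length, suffix length,
// suffix bytes) relative to the previous term. The standalone sketch below illustrates that
// front-coding scheme with plain byte arrays; it mirrors the idea only, not the exact Lucene.Net
// on-disk format or its WriteVInt32 encoding.
internal static class TermDeltaEncodingSketch
{
    // Returns the number of leading bytes that "current" shares with "previous".
    private static int SharedPrefixLength(byte[] previous, byte[] current)
    {
        int max = System.Math.Min(previous.Length, current.Length);
        int i = 0;
        while (i < max && previous[i] == current[i])
        {
            i++;
        }
        return i;
    }

    // Encodes an already-sorted term list as prefix/suffix pairs, the way StartTerm does before
    // handing them to the term-vectors output stream.
    internal static System.Collections.Generic.List<(int Prefix, byte[] Suffix)> Encode(
        System.Collections.Generic.IEnumerable<byte[]> sortedTerms)
    {
        var result = new System.Collections.Generic.List<(int Prefix, byte[] Suffix)>();
        var previous = System.Array.Empty<byte>();
        foreach (var term in sortedTerms)
        {
            int prefix = SharedPrefixLength(previous, term);
            var suffix = new byte[term.Length - prefix];
            System.Array.Copy(term, prefix, suffix, 0, suffix.Length);
            result.Add((prefix, suffix));
            previous = term;
        }
        return result;
    }
}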
using System; using System.IO; using System.Net.Sockets; using System.Net.Security; using System.Security.Authentication; using System.Security.Cryptography.X509Certificates; using System.Threading; using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; using System.Net; using System.Net.NetworkInformation; using FluentFTP.Helpers; using FluentFTP.Exceptions; #if CORE || NET45 using System.Threading.Tasks; #endif namespace FluentFTP { /// <summary> /// Stream class used for talking. Used by FtpClient, extended by FtpDataStream /// </summary> public class FtpSocketStream : Stream, IDisposable { public readonly FtpClient Client; public FtpSocketStream(FtpClient conn) { Client = conn; } /// <summary> /// Used for tacking read/write activity on the socket /// to determine if Poll() should be used to test for /// socket connectivity. The socket in this class will /// not know it has been disconnected if the remote host /// closes the connection first. Using Poll() avoids /// the exception that would be thrown when trying to /// read or write to the disconnected socket. /// </summary> private DateTime m_lastActivity = DateTime.Now; private Socket m_socket = null; /// <summary> /// The socket used for talking /// </summary> protected Socket Socket { get => m_socket; private set => m_socket = value; } private int m_socketPollInterval = 15000; /// <summary> /// Gets or sets the length of time in milliseconds /// that must pass since the last socket activity /// before calling Poll() on the socket to test for /// connectivity. Setting this interval too low will /// have a negative impact on performance. Setting this /// interval to 0 disables Poll()'ing all together. /// The default value is 15 seconds. /// </summary> public int SocketPollInterval { get => m_socketPollInterval; set => m_socketPollInterval = value; } /// <summary> /// Gets the number of available bytes on the socket, 0 if the /// socket has not been initialized. This property is used internally /// by FtpClient in an effort to detect disconnections and gracefully /// reconnect the control connection. 
/// </summary> internal int SocketDataAvailable { get { if (m_socket != null) { return m_socket.Available; } return 0; } } /// <summary> /// Gets a value indicating if this socket stream is connected /// </summary> public bool IsConnected { get { try { if (m_socket == null) { return false; } if (!m_socket.Connected) { Close(); return false; } if (!CanRead || !CanWrite) { Close(); return false; } if (m_socketPollInterval > 0 && DateTime.Now.Subtract(m_lastActivity).TotalMilliseconds > m_socketPollInterval) { Client.LogStatus(FtpTraceLevel.Verbose, "Testing connectivity using Socket.Poll()..."); // FIX : #273 update m_lastActivity to the current time m_lastActivity = DateTime.Now; if (m_socket.Poll(500000, SelectMode.SelectRead) && m_socket.Available == 0) { Close(); return false; } } } catch (SocketException sockex) { Close(); Client.LogStatus(FtpTraceLevel.Warn, "FtpSocketStream.IsConnected: Caught and discarded SocketException while testing for connectivity: " + sockex.ToString()); return false; } catch (IOException ioex) { Close(); Client.LogStatus(FtpTraceLevel.Warn, "FtpSocketStream.IsConnected: Caught and discarded IOException while testing for connectivity: " + ioex.ToString()); return false; } return true; } } /// <summary> /// Gets a value indicating if encryption is being used /// </summary> public bool IsEncrypted { get { #if NO_SSL return false; #else return m_sslStream != null; #endif } } private NetworkStream m_netStream = null; /// <summary> /// The non-encrypted stream /// </summary> private NetworkStream NetworkStream { get => m_netStream; set => m_netStream = value; } #if !NO_SSL private BufferedStream m_bufStream = null; private SslStream m_sslStream = null; /// <summary> /// The encrypted stream /// </summary> private SslStream SslStream { get => m_sslStream; set => m_sslStream = value; } #endif /// <summary> /// Gets the underlying stream, could be a NetworkStream or SslStream /// </summary> protected Stream BaseStream { get { #if NO_SSL if (m_netStream != null) { return m_netStream; } #else if (m_sslStream != null) { return m_sslStream; } else if (m_netStream != null) { return m_netStream; } #endif return null; } } /// <summary> /// Gets a value indicating if this stream can be read /// </summary> public override bool CanRead { get { if (m_netStream != null) { return m_netStream.CanRead; } return false; } } /// <summary> /// Gets a value indicating if this stream if seekable /// </summary> public override bool CanSeek => false; /// <summary> /// Gets a value indicating if this stream can be written to /// </summary> public override bool CanWrite { get { if (m_netStream != null) { return m_netStream.CanWrite; } return false; } } /// <summary> /// Gets the length of the stream /// </summary> public override long Length => 0; /// <summary> /// Gets the current position of the stream. Trying to /// set this property throws an InvalidOperationException() /// </summary> public override long Position { get { if (BaseStream != null) { return BaseStream.Position; } return 0; } set => throw new InvalidOperationException(); } private event FtpSocketStreamSslValidation m_sslvalidate = null; /// <summary> /// Event is fired when a SSL certificate needs to be validated /// </summary> public event FtpSocketStreamSslValidation ValidateCertificate { add => m_sslvalidate += value; remove => m_sslvalidate -= value; } private int m_readTimeout = Timeout.Infinite; /// <summary> /// Gets or sets the amount of time to wait for a read operation to complete. 
Default /// value is Timeout.Infinite. /// </summary> public override int ReadTimeout { get => m_readTimeout; set { m_readTimeout = value; if (m_netStream != null) { m_netStream.ReadTimeout = m_readTimeout; } } } private int m_connectTimeout = 30000; /// <summary> /// Gets or sets the length of time milliseconds to wait /// for a connection succeed before giving up. The default /// is 30000 (30 seconds). /// </summary> public int ConnectTimeout { get => m_connectTimeout; set => m_connectTimeout = value; } /// <summary> /// Gets the local end point of the socket /// </summary> public IPEndPoint LocalEndPoint { get { if (m_socket == null) { return null; } return (IPEndPoint) m_socket.LocalEndPoint; } } /// <summary> /// Gets the remote end point of the socket /// </summary> public IPEndPoint RemoteEndPoint { get { if (m_socket == null) { return null; } return (IPEndPoint) m_socket.RemoteEndPoint; } } /// <summary> /// Fires the SSL certificate validation event /// </summary> /// <param name="certificate">Certificate being validated</param> /// <param name="chain">Certificate chain</param> /// <param name="errors">Policy errors if any</param> /// <returns>True if it was accepted, false otherwise</returns> protected bool OnValidateCertificate(X509Certificate certificate, X509Chain chain, SslPolicyErrors errors) { var evt = m_sslvalidate; if (evt != null) { var e = new FtpSslValidationEventArgs() { Certificate = certificate, Chain = chain, PolicyErrors = errors, Accept = errors == SslPolicyErrors.None }; evt(this, e); return e.Accept; } // if the event was not handled then only accept // the certificate if there were no validation errors return errors == SslPolicyErrors.None; } /// <summary> /// Throws an InvalidOperationException /// </summary> /// <param name="offset">Ignored</param> /// <param name="origin">Ignored</param> /// <returns></returns> public override long Seek(long offset, SeekOrigin origin) { throw new InvalidOperationException(); } /// <summary> /// Throws an InvalidOperationException /// </summary> /// <param name="value">Ignored</param> public override void SetLength(long value) { throw new InvalidOperationException(); } /// <summary> /// Flushes the stream /// </summary> public override void Flush() { if (!IsConnected) { throw new InvalidOperationException("The FtpSocketStream object is not connected."); } if (BaseStream == null) { throw new InvalidOperationException("The base stream of the FtpSocketStream object is null."); } BaseStream.Flush(); } #if ASYNC /// <summary> /// Flushes the stream asynchronously /// </summary> /// <param name="token">The <see cref="CancellationToken"/> for this task</param> public override async Task FlushAsync(CancellationToken token) { if (!IsConnected) { throw new InvalidOperationException("The FtpSocketStream object is not connected."); } if (BaseStream == null) { throw new InvalidOperationException("The base stream of the FtpSocketStream object is null."); } await BaseStream.FlushAsync(token); } #endif /// <summary> /// Bypass the stream and read directly off the socket. 
/// </summary> /// <param name="buffer">The buffer to read into</param> /// <returns>The number of bytes read</returns> internal int RawSocketRead(byte[] buffer) { var read = 0; if (m_socket != null && m_socket.Connected) { read = m_socket.Receive(buffer, buffer.Length, 0); } return read; } #if ASYNC internal async Task EnableCancellation(Task task, CancellationToken token, Action action) { var registration = token.Register(action); _ = task.ContinueWith(x => registration.Dispose(), CancellationToken.None); await task; } internal async Task<T> EnableCancellation<T>(Task<T> task, CancellationToken token, Action action) { var registration = token.Register(action); _ = task.ContinueWith(x => registration.Dispose(), CancellationToken.None); return await task; } #endif #if NET45 /// <summary> /// Bypass the stream and read directly off the socket. /// </summary> /// <param name="buffer">The buffer to read into</param> /// <param name="token">The token that can be used to cancel the entire process</param> /// <returns>The number of bytes read</returns> internal async Task<int> RawSocketReadAsync(byte[] buffer, CancellationToken token) { var read = 0; if (m_socket != null && m_socket.Connected) { var asyncResult = m_socket.BeginReceive(buffer, 0, buffer.Length, 0, null, null); read = await EnableCancellation( Task.Factory.FromAsync(asyncResult, m_socket.EndReceive), token, () => CloseSocket() ); } return read; } #endif #if ASYNC && !NET45 /// <summary> /// Bypass the stream and read directly off the socket. /// </summary> /// <param name="buffer">The buffer to read into</param> /// <returns>The number of bytes read</returns> internal async Task<int> RawSocketReadAsync(byte[] buffer, CancellationToken token) { var read = 0; if (m_socket != null && m_socket.Connected && !token.IsCancellationRequested) { read = await m_socket.ReceiveAsync(new ArraySegment<byte>(buffer), 0); } return read; } #endif /// <summary> /// Reads data from the stream /// </summary> /// <param name="buffer">Buffer to read into</param> /// <param name="offset">Where in the buffer to start</param> /// <param name="count">Number of bytes to be read</param> /// <returns>The amount of bytes read from the stream</returns> public override int Read(byte[] buffer, int offset, int count) { #if !CORE IAsyncResult ar = null; #endif if (BaseStream == null) { return 0; } m_lastActivity = DateTime.Now; #if CORE return BaseStream.Read(buffer, offset, count); #else ar = BaseStream.BeginRead(buffer, offset, count, null, null); bool success = ar.AsyncWaitHandle.WaitOne(m_readTimeout, true); ar.AsyncWaitHandle.Close(); if (!success) { Close(); throw new TimeoutException("Timed out trying to read data from the socket stream!"); } return BaseStream.EndRead(ar); #endif } #if ASYNC /// <summary> /// Reads data from the stream /// </summary> /// <param name="buffer">Buffer to read into</param> /// <param name="offset">Where in the buffer to start</param> /// <param name="count">Number of bytes to be read</param> /// <param name="token">The <see cref="CancellationToken"/> for this task</param> /// <returns>The amount of bytes read from the stream</returns> public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken token) { if (BaseStream == null) { return 0; } m_lastActivity = DateTime.Now; using (var cts = CancellationTokenSource.CreateLinkedTokenSource(token)) { cts.CancelAfter(ReadTimeout); cts.Token.Register(() => Close()); try { var res = await BaseStream.ReadAsync(buffer, offset, count, cts.Token); return 
res; } catch { // CTS for Cancellation triggered and caused the exception if (token.IsCancellationRequested) { throw new OperationCanceledException("Cancelled read from socket stream"); } // CTS for Timeout triggered and caused the exception if (cts.IsCancellationRequested) { throw new TimeoutException("Timed out trying to read data from the socket stream!"); } // Nothing of the above. So we rethrow the exception. throw; } } } #endif /// <summary> /// Reads a line from the socket /// </summary> /// <param name="encoding">The type of encoding used to convert from byte[] to string</param> /// <returns>A line from the stream, null if there is nothing to read</returns> public string ReadLine(System.Text.Encoding encoding) { var data = new List<byte>(); var buf = new byte[1]; string line = null; while (Read(buf, 0, buf.Length) > 0) { data.Add(buf[0]); if ((char) buf[0] == '\n') { line = encoding.GetString(data.ToArray()).Trim('\r', '\n'); break; } } return line; } /// <summary> /// Reads all line from the socket /// </summary> /// <param name="encoding">The type of encoding used to convert from byte[] to string</param> /// <param name="bufferSize">The size of the buffer</param> /// <returns>A list of lines from the stream</returns> public IEnumerable<string> ReadAllLines(System.Text.Encoding encoding, int bufferSize) { int charRead; var data = new List<byte>(); var buf = new byte[bufferSize]; while ((charRead = Read(buf, 0, buf.Length)) > 0) { var firstByteToReadIdx = 0; var separatorIdx = Array.IndexOf(buf, (byte) '\n', firstByteToReadIdx, charRead - firstByteToReadIdx); //search in full byte array readed while (separatorIdx >= 0) // at least one '\n' returned { while (firstByteToReadIdx <= separatorIdx) { data.Add(buf[firstByteToReadIdx++]); } var line = encoding.GetString(data.ToArray()).Trim('\r', '\n'); // convert data to string yield return line; data.Clear(); separatorIdx = Array.IndexOf(buf, (byte) '\n', firstByteToReadIdx, charRead - firstByteToReadIdx); //search in full byte array readed } while (firstByteToReadIdx < charRead) // add all remaining characters to data { data.Add(buf[firstByteToReadIdx++]); } } } #if ASYNC /// <summary> /// Reads a line from the socket asynchronously /// </summary> /// <param name="encoding">The type of encoding used to convert from byte[] to string</param> /// <param name="token">The <see cref="CancellationToken"/> for this task</param> /// <returns>A line from the stream, null if there is nothing to read</returns> public async Task<string> ReadLineAsync(System.Text.Encoding encoding, CancellationToken token) { var data = new List<byte>(); var buf = new byte[1]; string line = null; while (await ReadAsync(buf, 0, buf.Length, token) > 0) { data.Add(buf[0]); if ((char) buf[0] == '\n') { line = encoding.GetString(data.ToArray()).Trim('\r', '\n'); break; } } return line; } /// <summary> /// Reads all line from the socket /// </summary> /// <param name="encoding">The type of encoding used to convert from byte[] to string</param> /// <param name="bufferSize">The size of the buffer</param> /// <returns>A list of lines from the stream</returns> public async Task<IEnumerable<string>> ReadAllLinesAsync(System.Text.Encoding encoding, int bufferSize, CancellationToken token) { int charRead; var data = new List<byte>(); var lines = new List<string>(); var buf = new byte[bufferSize]; while ((charRead = await ReadAsync(buf, 0, buf.Length, token)) > 0) { var firstByteToReadIdx = 0; var separatorIdx = Array.IndexOf(buf, (byte) '\n', firstByteToReadIdx, charRead - 
firstByteToReadIdx); //search in full byte array read while (separatorIdx >= 0) // at least one '\n' returned { while (firstByteToReadIdx <= separatorIdx) { data.Add(buf[firstByteToReadIdx++]); } var line = encoding.GetString(data.ToArray()).Trim('\r', '\n'); // convert data to string lines.Add(line); data.Clear(); separatorIdx = Array.IndexOf(buf, (byte) '\n', firstByteToReadIdx, charRead - firstByteToReadIdx); //search in full byte array read } while (firstByteToReadIdx < charRead) // add all remaining characters to data { data.Add(buf[firstByteToReadIdx++]); } } return lines; } #endif /// <summary> /// Writes data to the stream /// </summary> /// <param name="buffer">Buffer to write to stream</param> /// <param name="offset">Where in the buffer to start</param> /// <param name="count">Number of bytes to be read</param> public override void Write(byte[] buffer, int offset, int count) { if (BaseStream == null) { return; } BaseStream.Write(buffer, offset, count); m_lastActivity = DateTime.Now; } #if ASYNC /// <summary> /// Writes data to the stream asynchronously /// </summary> /// <param name="buffer">Buffer to write to stream</param> /// <param name="offset">Where in the buffer to start</param> /// <param name="count">Number of bytes to be read</param> /// <param name="token">The <see cref="CancellationToken"/> for this task</param> public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken token) { if (BaseStream == null) { return; } await BaseStream.WriteAsync(buffer, offset, count, token); m_lastActivity = DateTime.Now; } #endif /// <summary> /// Writes a line to the stream using the specified encoding /// </summary> /// <param name="encoding">Encoding used for writing the line</param> /// <param name="buf">The data to write</param> public void WriteLine(System.Text.Encoding encoding, string buf) { byte[] data; data = encoding.GetBytes(buf + "\r\n"); Write(data, 0, data.Length); } #if ASYNC /// <summary> /// Writes a line to the stream using the specified encoding asynchronously /// </summary> /// <param name="encoding">Encoding used for writing the line</param> /// <param name="buf">The data to write</param> /// <param name="token">The <see cref="CancellationToken"/> for this task</param> public async Task WriteLineAsync(System.Text.Encoding encoding, string buf, CancellationToken token) { var data = encoding.GetBytes(buf + "\r\n"); await WriteAsync(data, 0, data.Length, token); } #endif #if CORE /// <summary> /// Disconnects from server /// </summary> public virtual void Close() { Dispose(true); } #endif /// <summary> /// Disconnects from server /// </summary> protected override void Dispose(bool disposing) { // Fix: Hard catch and suppress all exceptions during disposing as there are constant issues with this method try { // ensure null exceptions don't occur here if (Client != null) { Client.LogStatus(FtpTraceLevel.Verbose, "Disposing FtpSocketStream..."); } } catch (Exception) { } #if !NO_SSL if (m_sslStream != null) { try { #if NET5_0_OR_GREATER || NETSTANDARD2_1_OR_GREATER m_sslStream.ShutdownAsync().RunSynchronously(); #endif m_sslStream.Dispose(); } catch (Exception ex) { } m_sslStream = null; } #endif #if !NO_SSL if (m_bufStream != null) { try { // ensure the last of the buffered bytes are flushed // before we close the socket and network stream m_bufStream.Flush(); m_bufStream.Dispose(); } catch (Exception ex) { } m_bufStream = null; } #endif if (m_netStream != null) { try { m_netStream.Dispose(); } catch (Exception ex) { } m_netStream = 
null; } CloseSocket(); } /// <summary> /// Safely close the socket if its open /// </summary> internal void CloseSocket() { if (m_socket != null) { try { #if !NET20 && !NET35 && !CORE m_socket.Dispose(); #endif if (m_socket.Connected) { #if NET5_0_OR_GREATER m_socket.Shutdown(SocketShutdown.Send); m_socket.Close(); #endif } } catch (Exception ex) { } m_socket = null; } } /// <summary> /// Sets socket options on the underlying socket /// </summary> /// <param name="level">SocketOptionLevel</param> /// <param name="name">SocketOptionName</param> /// <param name="value">SocketOptionValue</param> public void SetSocketOption(SocketOptionLevel level, SocketOptionName name, bool value) { if (m_socket == null) { throw new InvalidOperationException("The underlying socket is null. Have you established a connection?"); } m_socket.SetSocketOption(level, name, value); } /// <summary> /// Connect to the specified host /// </summary> /// <param name="host">The host to connect to</param> /// <param name="port">The port to connect to</param> /// <param name="ipVersions">Internet Protocol versions to support during the connection phase</param> public void Connect(string host, int port, FtpIpVersion ipVersions) { #if CORE IPAddress[] addresses = Dns.GetHostAddressesAsync(host).Result; #else IAsyncResult ar = null; var addresses = Dns.GetHostAddresses(host); #endif if (ipVersions == 0) { throw new ArgumentException("The ipVersions parameter must contain at least 1 flag."); } for (var i = 0; i < addresses.Length; i++) { // we don't need to do this check unless // a particular version of IP has been // omitted so we won't. if (ipVersions != FtpIpVersion.ANY) { switch (addresses[i].AddressFamily) { case AddressFamily.InterNetwork: if ((ipVersions & FtpIpVersion.IPv4) != FtpIpVersion.IPv4) { #if DEBUG Client.LogStatus(FtpTraceLevel.Verbose, "Skipped IPV4 address : " + addresses[i].ToString()); #endif continue; } break; case AddressFamily.InterNetworkV6: if ((ipVersions & FtpIpVersion.IPv6) != FtpIpVersion.IPv6) { #if DEBUG Client.LogStatus(FtpTraceLevel.Verbose, "Skipped IPV6 address : " + addresses[i].ToString()); #endif continue; } break; } } if (FtpTrace.LogIP) { Client.LogStatus(FtpTraceLevel.Info, "Connecting to " + addresses[i].ToString() + ":" + port); } else { Client.LogStatus(FtpTraceLevel.Info, "Connecting to ***:" + port); } m_socket = new Socket(addresses[i].AddressFamily, SocketType.Stream, ProtocolType.Tcp); BindSocketToLocalIp(); #if CORE var args = new SocketAsyncEventArgs { RemoteEndPoint = new IPEndPoint(addresses[i], port) }; var connectEvent = new ManualResetEvent(false); args.Completed += (s, e) => { connectEvent.Set(); }; if (m_socket.ConnectAsync(args)) { if (!connectEvent.WaitOne(m_connectTimeout)) { Close(); if (i + 1 == addresses.Length) { throw new TimeoutException("Timed out trying to connect!"); } } } if (args.SocketError != SocketError.Success) { throw new SocketException((int) args.SocketError); } break; #else ar = m_socket.BeginConnect(addresses[i], port, null, null); bool success = ar.AsyncWaitHandle.WaitOne(m_connectTimeout, true); ar.AsyncWaitHandle.Close(); if (!success) { Close(); // check to see if we're out of addresses, and throw a TimeoutException if (i + 1 == addresses.Length) { throw new TimeoutException("Timed out trying to connect!"); } } else { m_socket.EndConnect(ar); // we got a connection, break out // of the loop. 
break; } #endif } // make sure that we actually connected to // one of the addresses returned from GetHostAddresses() if (m_socket == null || !m_socket.Connected) { Close(); throw new IOException("Failed to connect to host."); } m_netStream = new NetworkStream(m_socket); m_netStream.ReadTimeout = m_readTimeout; m_lastActivity = DateTime.Now; } #if ASYNC /// <summary> /// Connect to the specified host /// </summary> /// <param name="host">The host to connect to</param> /// <param name="port">The port to connect to</param> /// <param name="ipVersions">Internet Protocol versions to support during the connection phase</param> /// <param name="token">The token that can be used to cancel the entire process</param> public async Task ConnectAsync(string host, int port, FtpIpVersion ipVersions, CancellationToken token) { IPAddress[] addresses = await Dns.GetHostAddressesAsync(host); if (ipVersions == 0) { throw new ArgumentException("The ipVersions parameter must contain at least 1 flag."); } for (var i = 0; i < addresses.Length; i++) { // we don't need to do this check unless // a particular version of IP has been // omitted so we won't. if (ipVersions != FtpIpVersion.ANY) { switch (addresses[i].AddressFamily) { case AddressFamily.InterNetwork: if ((ipVersions & FtpIpVersion.IPv4) != FtpIpVersion.IPv4) { #if DEBUG Client.LogStatus(FtpTraceLevel.Verbose, "Skipped IPV4 address : " + addresses[i].ToString()); #endif continue; } break; case AddressFamily.InterNetworkV6: if ((ipVersions & FtpIpVersion.IPv6) != FtpIpVersion.IPv6) { #if DEBUG Client.LogStatus(FtpTraceLevel.Verbose, "Skipped IPV6 address : " + addresses[i].ToString()); #endif continue; } break; } } if (FtpTrace.LogIP) { Client.LogStatus(FtpTraceLevel.Info, "Connecting to " + addresses[i].ToString() + ":" + port); } else { Client.LogStatus(FtpTraceLevel.Info, "Connecting to ***:" + port); } m_socket = new Socket(addresses[i].AddressFamily, SocketType.Stream, ProtocolType.Tcp); BindSocketToLocalIp(); #if CORE if (this.ConnectTimeout > 0) { using (var timeoutSrc = CancellationTokenSource.CreateLinkedTokenSource(token)) { timeoutSrc.CancelAfter(this.ConnectTimeout); await EnableCancellation(m_socket.ConnectAsync(addresses[i], port), timeoutSrc.Token, () => CloseSocket()); break; } } else { await EnableCancellation(m_socket.ConnectAsync(addresses[i], port), token, () => CloseSocket()); break; } #else var connectResult = m_socket.BeginConnect(addresses[i], port, null, null); await EnableCancellation(Task.Factory.FromAsync(connectResult, m_socket.EndConnect), token, () => CloseSocket()); break; #endif } // make sure that we actually connected to // one of the addresses returned from GetHostAddresses() if (m_socket == null || !m_socket.Connected) { Close(); throw new IOException("Failed to connect to host."); } m_netStream = new NetworkStream(m_socket); m_netStream.ReadTimeout = m_readTimeout; m_lastActivity = DateTime.Now; } #endif #if !NO_SSL /// <summary> /// Activates SSL on this stream using default protocols. Fires the ValidateCertificate event. /// If this event is not handled and there are SslPolicyErrors present, the certificate will /// not be accepted. /// </summary> /// <param name="targethost">The host to authenticate the certificate against</param> public void ActivateEncryption(string targethost) { ActivateEncryption(targethost, null, Client.SslProtocols); } #if ASYNC /// <summary> /// Activates SSL on this stream using default protocols. Fires the ValidateCertificate event. 
/// If this event is not handled and there are SslPolicyErrors present, the certificate will /// not be accepted. /// </summary> /// <param name="targethost">The host to authenticate the certificate against</param> public async Task ActivateEncryptionAsync(string targethost) { await ActivateEncryptionAsync(targethost, null, Client.SslProtocols); } #endif /// <summary> /// Activates SSL on this stream using default protocols. Fires the ValidateCertificate event. /// If this event is not handled and there are SslPolicyErrors present, the certificate will /// not be accepted. /// </summary> /// <param name="targethost">The host to authenticate the certificate against</param> /// <param name="clientCerts">A collection of client certificates to use when authenticating the SSL stream</param> public void ActivateEncryption(string targethost, X509CertificateCollection clientCerts) { ActivateEncryption(targethost, clientCerts, Client.SslProtocols); } #if ASYNC /// <summary> /// Activates SSL on this stream using default protocols. Fires the ValidateCertificate event. /// If this event is not handled and there are SslPolicyErrors present, the certificate will /// not be accepted. /// </summary> /// <param name="targethost">The host to authenticate the certificate against</param> /// <param name="clientCerts">A collection of client certificates to use when authenticating the SSL stream</param> public async Task ActivateEncryptionAsync(string targethost, X509CertificateCollection clientCerts) { await ActivateEncryptionAsync(targethost, clientCerts, Client.SslProtocols); } #endif /// <summary> /// Activates SSL on this stream using the specified protocols. Fires the ValidateCertificate event. /// If this event is not handled and there are SslPolicyErrors present, the certificate will /// not be accepted. 
/// </summary> /// <param name="targethost">The host to authenticate the certificate against</param> /// <param name="clientCerts">A collection of client certificates to use when authenticating the SSL stream</param> /// <param name="sslProtocols">A bitwise parameter for supported encryption protocols.</param> /// <exception cref="AuthenticationException">Thrown when authentication fails</exception> public void ActivateEncryption(string targethost, X509CertificateCollection clientCerts, SslProtocols sslProtocols) { if (!IsConnected) { throw new InvalidOperationException("The FtpSocketStream object is not connected."); } if (m_netStream == null) { throw new InvalidOperationException("The base network stream is null."); } if (m_sslStream != null) { throw new InvalidOperationException("SSL Encryption has already been enabled on this stream."); } try { DateTime auth_start; TimeSpan auth_time_total; CreateBufferStream(); #if CORE m_sslStream = new SslStream(GetBufferStream(), true, new RemoteCertificateValidationCallback( delegate(object sender, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors) { return OnValidateCertificate(certificate, chain, sslPolicyErrors); } )); #else m_sslStream = new FtpSslStream(GetBufferStream(), true, new RemoteCertificateValidationCallback( delegate(object sender, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors) { return OnValidateCertificate(certificate, chain, sslPolicyErrors); } )); #endif auth_start = DateTime.Now; try { #if CORE m_sslStream.AuthenticateAsClientAsync(targethost, clientCerts, sslProtocols, Client.ValidateCertificateRevocation).Wait(); #else m_sslStream.AuthenticateAsClient(targethost, clientCerts, sslProtocols, Client.ValidateCertificateRevocation); #endif } catch (IOException ex) { if (ex.InnerException is Win32Exception) { var win32Exception = (Win32Exception) ex.InnerException; if (win32Exception.NativeErrorCode == 10053) { throw new FtpMissingSocketException(ex); } } throw; } auth_time_total = DateTime.Now.Subtract(auth_start); Client.LogStatus(FtpTraceLevel.Info, "FTPS Authentication Successful"); Client.LogStatus(FtpTraceLevel.Verbose, "Time to activate encryption: " + auth_time_total.Hours + "h " + auth_time_total.Minutes + "m " + auth_time_total.Seconds + "s. Total Seconds: " + auth_time_total.TotalSeconds + "."); } catch (AuthenticationException) { // authentication failed and in addition it left our // ssl stream in an unusable state so cleanup needs // to be done and the exception can be re-thrown for // handling down the chain. (Add logging?) Close(); Client.LogStatus(FtpTraceLevel.Error, "FTPS Authentication Failed"); throw; } } /// <summary> /// Conditionally create a SSL BufferStream based on the configuration in FtpClient.SslBuffering. /// </summary> private void CreateBufferStream() { // Fix: SSL BufferStream is automatically disabled when using FTP proxies, and enabled in all other cases // Fix: SSL Buffering is disabled on .NET 5.0 due to issues in .NET framework - See #682 #if NET50 m_bufStream = null; #else if (Client.SslBuffering == FtpsBuffering.On || Client.SslBuffering == FtpsBuffering.Auto && !Client.IsProxy()) { m_bufStream = new BufferedStream(NetworkStream, 81920); } else { m_bufStream = null; } #endif } /// <summary> /// If SSL Buffering is enabled it returns the BufferStream, else returns the internal NetworkStream. /// </summary> /// <returns></returns> private Stream GetBufferStream() { return m_bufStream != null ? 
(Stream)m_bufStream : (Stream)NetworkStream; } #if ASYNC /// <summary> /// Activates SSL on this stream using the specified protocols. Fires the ValidateCertificate event. /// If this event is not handled and there are SslPolicyErrors present, the certificate will /// not be accepted. /// </summary> /// <param name="targethost">The host to authenticate the certificate against</param> /// <param name="clientCerts">A collection of client certificates to use when authenticating the SSL stream</param> /// <param name="sslProtocols">A bitwise parameter for supported encryption protocols.</param> /// <exception cref="AuthenticationException">Thrown when authentication fails</exception> public async Task ActivateEncryptionAsync(string targethost, X509CertificateCollection clientCerts, SslProtocols sslProtocols) { if (!IsConnected) { throw new InvalidOperationException("The FtpSocketStream object is not connected."); } if (m_netStream == null) { throw new InvalidOperationException("The base network stream is null."); } if (m_sslStream != null) { throw new InvalidOperationException("SSL Encryption has already been enabled on this stream."); } try { DateTime auth_start; TimeSpan auth_time_total; CreateBufferStream(); #if CORE m_sslStream = new SslStream(GetBufferStream(), true, new RemoteCertificateValidationCallback( delegate(object sender, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors) { return OnValidateCertificate(certificate, chain, sslPolicyErrors); } )); #else m_sslStream = new FtpSslStream(GetBufferStream(), true, new RemoteCertificateValidationCallback( delegate(object sender, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors) { return OnValidateCertificate(certificate, chain, sslPolicyErrors); } )); #endif auth_start = DateTime.Now; try { await m_sslStream.AuthenticateAsClientAsync(targethost, clientCerts, sslProtocols, Client.ValidateCertificateRevocation); } catch (IOException ex) { if (ex.InnerException is Win32Exception) { var win32Exception = (Win32Exception) ex.InnerException; if (win32Exception.NativeErrorCode == 10053) { throw new FtpMissingSocketException(ex); } } throw; } auth_time_total = DateTime.Now.Subtract(auth_start); Client.LogStatus(FtpTraceLevel.Info, "FTPS Authentication Successful"); Client.LogStatus(FtpTraceLevel.Verbose, "Time to activate encryption: " + auth_time_total.Hours + "h " + auth_time_total.Minutes + "m " + auth_time_total.Seconds + "s. Total Seconds: " + auth_time_total.TotalSeconds + "."); } catch (AuthenticationException) { // authentication failed and in addition it left our // ssl stream in an unusable state so cleanup needs // to be done and the exception can be re-thrown for // handling down the chain. (Add logging?) Close(); Client.LogStatus(FtpTraceLevel.Error, "FTPS Authentication Failed"); throw; } } #endif #endif #if !CORE /// <summary> /// Deactivates SSL on this stream using the specified protocols and reverts back to plain-text FTP. 
/// </summary> public void DeactivateEncryption() { if (!IsConnected) { throw new InvalidOperationException("The FtpSocketStream object is not connected."); } if (m_sslStream == null) { throw new InvalidOperationException("SSL Encryption has not been enabled on this stream."); } m_sslStream.Close(); m_sslStream = null; } #endif /// <summary> /// Instructs this stream to listen for connections on the specified address and port /// </summary> /// <param name="address">The address to listen on</param> /// <param name="port">The port to listen on</param> public void Listen(IPAddress address, int port) { if (!IsConnected) { if (m_socket == null) { m_socket = new Socket(address.AddressFamily, SocketType.Stream, ProtocolType.Tcp); } m_socket.Bind(new IPEndPoint(address, port)); m_socket.Listen(1); } } /// <summary> /// Accepts a connection from a listening socket /// </summary> public void Accept() { if (m_socket != null) { m_socket = m_socket.Accept(); } } #if NET45 /// <summary> /// Accepts a connection from a listening socket /// </summary> public async Task AcceptAsync() { if (m_socket != null) { var iar = m_socket.BeginAccept(null, null); await Task.Factory.FromAsync(iar, m_socket.EndAccept); } } #endif #if ASYNC && !NET45 /// <summary> /// Accepts a connection from a listening socket /// </summary> public async Task AcceptAsync() { if (m_socket != null) { m_socket = await m_socket.AcceptAsync(); #if CORE m_netStream = new NetworkStream(m_socket); m_netStream.ReadTimeout = m_readTimeout; #endif } } #else /// <summary> /// Asynchronously accepts a connection from a listening socket /// </summary> /// <param name="callback"></param> /// <param name="state"></param> /// <returns></returns> public IAsyncResult BeginAccept(AsyncCallback callback, object state) { if (m_socket != null) { return m_socket.BeginAccept(callback, state); } return null; } /// <summary> /// Completes a BeginAccept() operation /// </summary> /// <param name="ar">IAsyncResult returned from BeginAccept</param> public void EndAccept(IAsyncResult ar) { if (m_socket != null) { m_socket = m_socket.EndAccept(ar); m_netStream = new NetworkStream(m_socket); m_netStream.ReadTimeout = m_readTimeout; } } #endif private void BindSocketToLocalIp() { #if ASYNC && !CORE14 && !CORE16 if (Client.SocketLocalIp != null) { var localPort = LocalPorts.GetRandomAvailable(Client.SocketLocalIp); var localEndpoint = new IPEndPoint(Client.SocketLocalIp, localPort); #if DEBUG Client.LogStatus(FtpTraceLevel.Verbose, $"Will now bind to {localEndpoint}"); #endif this.m_socket.Bind(localEndpoint); } #endif } #if CORE internal SocketAsyncEventArgs BeginAccept() { var args = new SocketAsyncEventArgs(); var connectEvent = new ManualResetEvent(false); args.UserToken = connectEvent; args.Completed += (s, e) => { connectEvent.Set(); }; if (!m_socket.AcceptAsync(args)) { CheckResult(args); return null; } return args; } internal void EndAccept(SocketAsyncEventArgs args, int timeout) { if (args == null) { return; } var connectEvent = (ManualResetEvent) args.UserToken; if (!connectEvent.WaitOne(timeout)) { Close(); throw new TimeoutException("Timed out waiting for the server to connect to the active data socket."); } CheckResult(args); } private void CheckResult(SocketAsyncEventArgs args) { if (args.SocketError != SocketError.Success) { throw new SocketException((int) args.SocketError); } m_socket = args.AcceptSocket; m_netStream = new NetworkStream(args.AcceptSocket); m_netStream.ReadTimeout = m_readTimeout; } #endif } }
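// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library above): the ReadAsync override
// in FtpSocketStream distinguishes a caller-requested cancellation from a read
// timeout by linking the caller's token to a second, timeout-driven
// CancellationTokenSource. The helper below shows that same pattern on an
// arbitrary System.IO.Stream; the class name, method name and message text are
// assumptions made only for this example.
// ---------------------------------------------------------------------------
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal static class TimedStreamReader
{
    public static async Task<int> TimedReadAsync(Stream stream, byte[] buffer, int timeoutMs, CancellationToken token)
    {
        using (var cts = CancellationTokenSource.CreateLinkedTokenSource(token))
        {
            // The linked source fires either when the caller cancels or when the timeout elapses.
            cts.CancelAfter(timeoutMs);
            try
            {
                return await stream.ReadAsync(buffer, 0, buffer.Length, cts.Token);
            }
            catch (OperationCanceledException) when (token.IsCancellationRequested)
            {
                // The caller asked for cancellation; surface it as cancellation.
                throw;
            }
            catch (OperationCanceledException)
            {
                // Only the linked source fired, so this was the timeout.
                throw new TimeoutException("Timed out trying to read data from the stream.");
            }
        }
    }
}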
// Copyright (c) Charlie Poole, Rob Prouse and Contributors. MIT License - see LICENSE.txt #nullable enable using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Reflection; using NUnit.Framework.Internal; namespace NUnit.Framework.Constraints { /// <summary> /// UniqueItemsConstraint tests whether all the items in a /// collection are unique. /// </summary> public class UniqueItemsConstraint : CollectionItemsEqualConstraint { /// <summary> /// The Description of what this constraint tests, for /// use in messages and in the ConstraintResult. /// </summary> public override string Description { get { return "all items unique"; } } /// <summary> /// Check that all items are unique. /// </summary> /// <param name="actual"></param> /// <returns></returns> protected override bool Matches(IEnumerable actual) { var nonUniqueItems = GetNonUniqueItems(actual); return nonUniqueItems.Count == 0; } /// <inheritdoc /> public override ConstraintResult ApplyTo<TActual>(TActual actual) { IEnumerable enumerable = ConstraintUtils.RequireActual<IEnumerable>(actual, nameof(actual)); var nonUniqueItems = GetNonUniqueItems(enumerable); return new UniqueItemsConstraintResult(this, actual, nonUniqueItems); } private ICollection OriginalAlgorithm(IEnumerable actual) { var nonUniques = new List<object>(); var processedItems = new List<object>(); foreach (var o1 in actual) { var isUnique = true; var unknownNonUnique = false; foreach (var o2 in processedItems) { if (ItemsEqual(o1, o2)) { isUnique = false; unknownNonUnique = !nonUniques.Any(nonUnique => ItemsEqual(o1, nonUnique)); break; } } if (isUnique) processedItems.Add(o1); else if (unknownNonUnique) { nonUniques.Add(o1); if (nonUniques.Count == MsgUtils.DefaultMaxItems) break; } } return nonUniques; } private ICollection?
TryInferFastPath(IEnumerable actual) { var allTypes = new List<Type>(); var allItems = new List<object>(); foreach (var item in actual) { allItems.Add(item); if (item != null) allTypes.Add(item.GetType()); } // Partly optimization, partly makes any subsequent all()/any() calls reliable if (allTypes.Count == 0) return new object[0]; var distinctTypes = allTypes.Distinct().ToList(); if (distinctTypes.Count == 1) { var itemsType = distinctTypes.FirstOrDefault(); if (IsTypeSafeForFastPath(itemsType)) { var itemsOfT = ItemsCastMethod.MakeGenericMethod(itemsType).Invoke(null, new[] { actual }); if (IgnoringCase) { if (itemsType == typeof(string)) return (ICollection)StringsUniqueIgnoringCase((IEnumerable<string>)itemsOfT); else if (itemsType == typeof(char)) return (ICollection)CharsUniqueIgnoringCase((IEnumerable<char>)itemsOfT); } return (ICollection)ItemsUniqueMethod.MakeGenericMethod(itemsType).Invoke(null, new object[] { itemsOfT }); } } else { if (distinctTypes.All(o => IsTypeSafeForFastPath(o) && !IsSpecialComparisonType(o))) { return (ICollection)ItemsUnique(allItems); } } return null; } private static bool IsSpecialComparisonType(Type type) { if (type.IsGenericType) return type.FullName.StartsWith("System.Collections.Generic.KeyValuePair`2", StringComparison.Ordinal); else if (Numerics.IsNumericType(type)) return true; else return type == typeof(string) || type == typeof(char) || type == typeof(DateTimeOffset) || type == typeof(DictionaryEntry); } private ICollection GetNonUniqueItems(IEnumerable actual) { // If the user specified any external comparer with Using, exit if (UsingExternalComparer) return OriginalAlgorithm(actual); // If IEnumerable<T> is not implemented exit, // Otherwise return value is the Type of T Type? memberType = GetGenericTypeArgument(actual); if (memberType == null) return TryInferFastPath(actual) ?? OriginalAlgorithm(actual); else if (!IsTypeSafeForFastPath(memberType)) return OriginalAlgorithm(actual); // Special handling for ignore case with strings and chars if (IgnoringCase) { if (memberType == typeof(string)) return (ICollection)StringsUniqueIgnoringCase((IEnumerable<string>)actual); else if (memberType == typeof(char)) return (ICollection)CharsUniqueIgnoringCase((IEnumerable<char>)actual); } return (ICollection)ItemsUniqueMethod.MakeGenericMethod(memberType).Invoke(null, new object[] { actual }); } private static bool IsTypeSafeForFastPath(Type? 
type) { return type != null && type.IsSealed && !IsHandledSpeciallyByNUnit(type); } private static readonly MethodInfo ItemsUniqueMethod = typeof(UniqueItemsConstraint).GetMethod(nameof(ItemsUnique), BindingFlags.Static | BindingFlags.NonPublic); private static readonly MethodInfo ItemsCastMethod = typeof(Enumerable).GetMethod(nameof(Enumerable.Cast), BindingFlags.Static | BindingFlags.Public); private static ICollection<T> ItemsUnique<T>(IEnumerable<T> actual) => NonUniqueItemsInternal(actual, EqualityComparer<T>.Default); private ICollection<string> StringsUniqueIgnoringCase(IEnumerable<string> actual) => NonUniqueItemsInternal(actual, new NUnitStringEqualityComparer(IgnoringCase)); private ICollection<char> CharsUniqueIgnoringCase(IEnumerable<char> actual) { var result = NonUniqueItemsInternal( actual.Select(x => x.ToString()), new NUnitStringEqualityComparer(IgnoringCase) ); return result.Select(x => x[0]).ToList(); } private static ICollection<T> NonUniqueItemsInternal<T>(IEnumerable<T> actual, IEqualityComparer<T> comparer) { var processedItems = new HashSet<T>(comparer); var knownNonUniques = new HashSet<T>(comparer); var nonUniques = new List<T>(); foreach (T item in actual) { // Check if 'item' is a duplicate of a previously-processed item if (!processedItems.Add(item)) { // Check if 'item' has previously been flagged as a duplicate if (knownNonUniques.Add(item)) { nonUniques.Add(item); if (nonUniques.Count == MsgUtils.DefaultMaxItems) break; } } } return nonUniques; } // Return true if NUnitEqualityHandler has special logic for Type private static bool IsHandledSpeciallyByNUnit(Type type) { if (type == typeof(string)) return false; // even though it's IEnumerable return type.IsArray || typeof(IEnumerable).IsAssignableFrom(type) // Covers lists, collections, dictionaries as well || typeof(System.IO.Stream).IsAssignableFrom(type) // Covers all streams || typeof(System.IO.DirectoryInfo).IsAssignableFrom(type) // Unlikely to be derived, but just in case || type.FullName == "System.Tuple" || type.FullName == "System.ValueTuple"; } private static Type? GetGenericTypeArgument(IEnumerable actual) { foreach (var type in actual.GetType().GetInterfaces()) { if (type.FullName.StartsWith("System.Collections.Generic.IEnumerable`1", StringComparison.Ordinal)) { return type.GenericTypeArguments[0]; } } return null; } private sealed class NUnitStringEqualityComparer : IEqualityComparer<string> { private readonly bool _ignoreCase; public NUnitStringEqualityComparer(bool ignoreCase) { _ignoreCase = ignoreCase; } public bool Equals(string x, string y) { string s1 = _ignoreCase ? x.ToLower() : x; string s2 = _ignoreCase ? y.ToLower() : y; return s1.Equals(s2); } public int GetHashCode(string obj) { if (obj is null) return 0; else if (_ignoreCase) return obj.ToLower().GetHashCode(); else return obj.GetHashCode(); } } internal sealed class UniqueItemsConstraintResult : ConstraintResult { internal ICollection NonUniqueItems { get; } public UniqueItemsConstraintResult(IConstraint constraint, object actualValue, ICollection nonUniqueItems) : base(constraint, actualValue, nonUniqueItems.Count == 0) { NonUniqueItems = nonUniqueItems; } public override void WriteAdditionalLinesTo(MessageWriter writer) { if (this.Status == ConstraintStatus.Failure) { writer.Write(" Not unique items: "); var output = MsgUtils.FormatCollection(NonUniqueItems, 0, MsgUtils.DefaultMaxItems); writer.WriteLine(output); } } } } }
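// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the constraint sources above): how the
// UniqueItemsConstraint is normally reached from test code via the standard
// NUnit Is.Unique syntax. The fixture and test names, and the sample data, are
// assumptions made only for this example.
// ---------------------------------------------------------------------------
using NUnit.Framework;

[TestFixture]
public class UniqueItemsExamples
{
    [Test]
    public void DistinctStrings_Pass()
    {
        Assert.That(new[] { "a", "b", "c" }, Is.Unique);
    }

    [Test]
    public void DuplicateInts_FailAndListTheDuplicates()
    {
        // On failure, UniqueItemsConstraintResult appends a
        // "Not unique items:" line listing the offending values (here: 2).
        Assert.That(new[] { 1, 2, 2, 3 }, Is.Unique);
    }
}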
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections; using System.Collections.Generic; using System.Threading; using OpenMetaverse; using log4net; using OpenSim.Framework; using OpenSim.Region.Framework.Scenes; using OpenSim.Region.ScriptEngine.Shared; using OpenSim.Region.ScriptEngine.Interfaces; namespace OpenSim.Region.ScriptEngine.Interfaces { public enum StateSource { RegionStart = 0, NewRez = 1, PrimCrossing = 2, ScriptedRez = 3, AttachedRez = 4, Teleporting = 5 } public interface IScriptWorkItem { bool Cancel(); bool Abort(); /// <summary> /// Wait for the work item to complete. /// </summary> /// <param name='t'>The number of milliseconds to wait. Must be >= -1 (Timeout.Infinite).</param> bool Wait(int t); } /// <summary> /// Interface for interaction with a particular script instance /// </summary> public interface IScriptInstance { /// <summary> /// Debug level for this script instance. /// </summary> /// <remarks> /// Level == 0, no extra data is logged. /// Level >= 1, state changes are logged. /// Level >= 2, event firing is logged. /// <value> /// The debug level. /// </value> int DebugLevel { get; set; } /// <summary> /// Is the script currently running? /// </summary> bool Running { get; set; } /// <summary> /// Is the script suspended? /// </summary> bool Suspended { get; set; } /// <summary> /// Is the script shutting down? /// </summary> bool ShuttingDown { get; set; } /// <summary> /// Script state /// </summary> string State { get; set; } /// <summary> /// If true then the engine is responsible for persisted state. If false then some other component may /// persist state (e.g. attachments persisting in assets). /// </summary> bool StatePersistedHere { get; } /// <summary> /// Time the script was last started /// </summary> DateTime TimeStarted { get; } /// <summary> /// Tick the last measurement period was started. /// </summary> long MeasurementPeriodTickStart { get; } /// <summary> /// Ticks spent executing in the last measurement period. 
/// </summary> long MeasurementPeriodExecutionTime { get; } /// <summary> /// Scene part in which this script instance is contained. /// </summary> SceneObjectPart Part { get; } IScriptEngine Engine { get; } UUID AppDomain { get; set; } string PrimName { get; } string ScriptName { get; } UUID ItemID { get; } UUID ObjectID { get; } /// <summary> /// UUID of the root object for the linkset that the script is in. /// </summary> UUID RootObjectID { get; } /// <summary> /// Local id of the root object for the linkset that the script is in. /// </summary> uint RootLocalID { get; } uint LocalID { get; } UUID AssetID { get; } /// <summary> /// Inventory item containing the script used. /// </summary> TaskInventoryItem ScriptTask { get; } Queue EventQueue { get; } /// <summary> /// Number of events queued for processing. /// </summary> long EventsQueued { get; } /// <summary> /// Number of events processed by this script instance. /// </summary> long EventsProcessed { get; } void ClearQueue(); int StartParam { get; set; } void RemoveState(); void Init(); void Start(); /// <summary> /// Stop the script instance. /// </summary> /// <remarks> /// This must not be called by a thread that is in the process of handling an event for this script. Otherwise /// there is a danger that it will self-abort and not complete the reset. /// </remarks> /// <param name="timeout"></param> /// How many milliseconds we will wait for an existing script event to finish before /// forcibly aborting that event. /// <param name="clearEventQueue">If true then the event queue is also cleared</param> /// <returns>true if the script was successfully stopped, false otherwise</returns> bool Stop(int timeout, bool clearEventQueue = false); void SetState(string state); /// <summary> /// Post an event to this script instance. /// </summary> /// <param name="data"></param> void PostEvent(EventParams data); void Suspend(); void Resume(); /// <summary> /// Process the next event queued for this script instance. /// </summary> /// <returns></returns> object EventProcessor(); int EventTime(); /// <summary> /// Reset the script. /// </summary> /// <remarks> /// This must not be called by a thread that is in the process of handling an event for this script. Otherwise /// there is a danger that it will self-abort and not complete the reset. Such a thread must call /// ApiResetScript() instead. /// </remarks> /// <param name='timeout'> /// How many milliseconds we will wait for an existing script event to finish before /// forcibly aborting that event prior to script reset. /// </param> void ResetScript(int timeout); /// <summary> /// Reset the script. /// </summary> /// <remarks> /// This must not be called by any thread other than the one executing the scripts current event. This is /// because there is no wait or abort logic if another thread is in the middle of processing a script event. /// Such an external thread should use ResetScript() instead. /// </remarks> void ApiResetScript(); Dictionary<string, object> GetVars(); void SetVars(Dictionary<string, object> vars); DetectParams GetDetectParams(int idx); UUID GetDetectID(int idx); void SaveState(); void DestroyScriptInstance(); IScriptApi GetApi(string name); Dictionary<KeyValuePair<int, int>, KeyValuePair<int, int>> LineMap { get; set; } string GetAssemblyName(); string GetXMLState(); double MinEventDelay { set; } UUID RegionID { get; } } }
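// ---------------------------------------------------------------------------
// Illustrative sketch (not part of OpenSim): the remarks on ResetScript() and
// ApiResetScript() above impose a threading contract - an external thread must
// use ResetScript(timeout), while the thread currently executing the script's
// own event must call ApiResetScript() instead. The helper below simply encodes
// that rule; the class name, method name and callerIsEventThread parameter are
// assumptions made only for this example.
// ---------------------------------------------------------------------------
using OpenSim.Region.ScriptEngine.Interfaces;

public static class ScriptInstanceResetHelper
{
    public static void ResetRespectingThreadContract(IScriptInstance instance, bool callerIsEventThread, int timeoutMs)
    {
        if (callerIsEventThread)
        {
            // Never wait on ourselves: the event thread self-resets cooperatively.
            instance.ApiResetScript();
        }
        else
        {
            // External threads may wait up to timeoutMs for the running event
            // to finish before it is forcibly aborted and the script is reset.
            instance.ResetScript(timeoutMs);
        }
    }
}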
using System; using System.Collections.Generic; using System.Linq; using UnityEditor.Experimental.VFX; using UnityEngine; using UnityEngine.Experimental.VFX; namespace UnityEditor.VFX { public static class VFXReflectionHelper { public static T[] CollectStaticReadOnlyExpression<T>(Type expressionType, System.Reflection.BindingFlags additionnalFlag = System.Reflection.BindingFlags.Public) { var members = expressionType.GetFields(System.Reflection.BindingFlags.Static | additionnalFlag) .Where(m => m.IsInitOnly && m.FieldType == typeof(T)) .ToArray(); var expressions = members.Select(m => (T)m.GetValue(null)).ToArray(); return expressions; } } abstract partial class VFXExpression { public struct Operands { public static readonly int OperandCount = 4; int data0; int data1; int data2; int data3; public Operands(int defaultValue) { data0 = defaultValue; data1 = defaultValue; data2 = defaultValue; data3 = defaultValue; } // This ugly code is for optimization purpose (no garbage created) public int this[int index] { get { switch (index) { case 0: return data0; case 1: return data1; case 2: return data2; case 3: return data3; default: throw new IndexOutOfRangeException(); } } set { switch (index) { case 0: data0 = value; break; case 1: data1 = value; break; case 2: data2 = value; break; case 3: data3 = value; break; default: throw new IndexOutOfRangeException(); } } } public int[] ToArray() { return new int[] { data0, data1, data2, data3 }; } } [Flags] public enum Flags { None = 0, Value = 1 << 0, // Expression is a value, get/set can be called on it Foldable = 1 << 1, // Expression is not a constant but can be folded anyway Constant = 1 << 2, // Expression is a constant, it can be folded InvalidOnGPU = 1 << 3, // Expression can be evaluated on GPU InvalidOnCPU = 1 << 4, // Expression can be evaluated on CPU PerElement = 1 << 5, // Expression is per element NotCompilableOnCPU = InvalidOnCPU | PerElement //Helper to filter out invalid expression on CPU } public static bool IsFloatValueType(VFXValueType valueType) { return valueType == VFXValueType.Float || valueType == VFXValueType.Float2 || valueType == VFXValueType.Float3 || valueType == VFXValueType.Float4; } public static bool IsUIntValueType(VFXValueType valueType) { return valueType == VFXValueType.Uint32; } public static bool IsIntValueType(VFXValueType valueType) { return valueType == VFXValueType.Int32; } public static bool IsBoolValueType(VFXValueType valueType) { return valueType == VFXValueType.Boolean; } public static int TypeToSize(VFXValueType type) { return VFXExpressionHelper.GetSizeOfType(type); } public static string TypeToCode(VFXValueType type) { switch (type) { case VFXValueType.Float: return "float"; case VFXValueType.Float2: return "float2"; case VFXValueType.Float3: return "float3"; case VFXValueType.Float4: return "float4"; case VFXValueType.Int32: return "int"; case VFXValueType.Uint32: return "uint"; case VFXValueType.Texture2D: return "Texture2D"; case VFXValueType.Texture2DArray: return "Texture2DArray"; case VFXValueType.Texture3D: return "Texture3D"; case VFXValueType.TextureCube: return "TextureCube"; case VFXValueType.TextureCubeArray: return "TextureCubeArray"; case VFXValueType.Matrix4x4: return "float4x4"; case VFXValueType.Boolean: return "bool"; } throw new NotImplementedException(type.ToString()); } // As certain type of uniforms are not handled in material, we need to use floats instead public static string TypeToUniformCode(VFXValueType type) { switch (type) { case VFXValueType.Float: return "float"; case 
VFXValueType.Float2: return "float2"; case VFXValueType.Float3: return "float3"; case VFXValueType.Float4: return "float4"; case VFXValueType.Int32: return "float"; case VFXValueType.Uint32: return "float"; case VFXValueType.Matrix4x4: return "float4x4"; case VFXValueType.Boolean: return "float"; } throw new NotImplementedException(type.ToString()); } public static Type TypeToType(VFXValueType type) { switch (type) { case VFXValueType.Float: return typeof(float); case VFXValueType.Float2: return typeof(Vector2); case VFXValueType.Float3: return typeof(Vector3); case VFXValueType.Float4: return typeof(Vector4); case VFXValueType.Int32: return typeof(int); case VFXValueType.Uint32: return typeof(uint); case VFXValueType.Texture2D: return typeof(Texture); case VFXValueType.Texture2DArray: return typeof(Texture); case VFXValueType.Texture3D: return typeof(Texture); case VFXValueType.TextureCube: return typeof(Texture); case VFXValueType.TextureCubeArray: return typeof(Texture); case VFXValueType.Matrix4x4: return typeof(Matrix4x4); case VFXValueType.Mesh: return typeof(Mesh); case VFXValueType.Curve: return typeof(AnimationCurve); case VFXValueType.ColorGradient: return typeof(Gradient); case VFXValueType.Boolean: return typeof(bool); } throw new NotImplementedException(type.ToString()); } public static bool IsTypeValidOnGPU(VFXValueType type) { switch (type) { case VFXValueType.Float: case VFXValueType.Float2: case VFXValueType.Float3: case VFXValueType.Float4: case VFXValueType.Int32: case VFXValueType.Uint32: case VFXValueType.Texture2D: case VFXValueType.Texture2DArray: case VFXValueType.Texture3D: case VFXValueType.TextureCube: case VFXValueType.TextureCubeArray: case VFXValueType.Matrix4x4: case VFXValueType.Boolean: return true; } return false; } public static bool IsTexture(VFXValueType type) { switch (type) { case VFXValueType.Texture2D: case VFXValueType.Texture2DArray: case VFXValueType.Texture3D: case VFXValueType.TextureCube: case VFXValueType.TextureCubeArray: return true; } return false; } public static bool IsUniform(VFXValueType type) { switch (type) { case VFXValueType.Float: case VFXValueType.Float2: case VFXValueType.Float3: case VFXValueType.Float4: case VFXValueType.Int32: case VFXValueType.Uint32: case VFXValueType.Matrix4x4: case VFXValueType.Boolean: return true; } return false; } public static Type GetMatchingScalar(Type type) { var vfxType = GetVFXValueTypeFromType(type); if (vfxType == VFXValueType.None) { var affinityFallback = VFXOperatorDynamicOperand.GetTypeAffinityList(type).GetEnumerator(); while (affinityFallback.MoveNext() && vfxType == VFXValueType.None) { vfxType = GetVFXValueTypeFromType(affinityFallback.Current); } } return TypeToType(GetMatchingScalar(vfxType)); } public static VFXValueType GetMatchingScalar(VFXValueType type) { if (IsFloatValueType(type)) return VFXValueType.Float; if (IsUIntValueType(type)) return VFXValueType.Uint32; if (IsIntValueType(type)) return VFXValueType.Int32; return VFXValueType.None; } public static VFXValueType GetVFXValueTypeFromType(Type type) { if (type == typeof(float)) return VFXValueType.Float; if (type == typeof(Vector2)) return VFXValueType.Float2; if (type == typeof(Vector3)) return VFXValueType.Float3; if (type == typeof(Vector4)) return VFXValueType.Float4; if (type == typeof(Color)) return VFXValueType.Float4; if (type == typeof(int)) return VFXValueType.Int32; if (type == typeof(uint)) return VFXValueType.Uint32; if (type == typeof(Texture2D)) return VFXValueType.Texture2D; if (type == typeof(Texture2DArray)) 
return VFXValueType.Texture2DArray; if (type == typeof(Texture3D)) return VFXValueType.Texture3D; if (type == typeof(Cubemap)) return VFXValueType.TextureCube; if (type == typeof(CubemapArray)) return VFXValueType.TextureCubeArray; if (type == typeof(Matrix4x4)) return VFXValueType.Matrix4x4; if (type == typeof(AnimationCurve)) return VFXValueType.Curve; if (type == typeof(Gradient)) return VFXValueType.ColorGradient; if (type == typeof(Mesh)) return VFXValueType.Mesh; if (type == typeof(List<Vector3>)) return VFXValueType.Spline; if (type == typeof(bool)) return VFXValueType.Boolean; return VFXValueType.None; } private static Dictionary<VFXExpression, VFXExpression> s_ExpressionCache = new Dictionary<VFXExpression, VFXExpression>(); public static void ClearCache() { s_ExpressionCache.Clear(); } //Ideally, we should use HashSet<T>.TryGetValue https://msdn.microsoft.com/en-us/library/mt829070(v=vs.110).aspx //but it's available only in 4.7, Dictionary<T, T> is a workaround, sensible same performance but there is a waste of memory private void SimplifyWithCacheParents() { for (int i = 0; i < m_Parents.Length; ++i) { VFXExpression parentEq; if (!s_ExpressionCache.TryGetValue(parents[i], out parentEq)) { s_ExpressionCache.Add(parents[i], parents[i]); } else { m_Parents[i] = parentEq; } } } protected VFXExpression(Flags flags, params VFXExpression[] parents) { m_Parents = parents; SimplifyWithCacheParents(); m_Flags = flags; PropagateParentsFlags(); } // Only do that when constructing an instance if needed private void Initialize(Flags additionalFlags, VFXExpression[] parents) { m_Parents = parents; SimplifyWithCacheParents(); m_Flags |= additionalFlags; PropagateParentsFlags(); m_HashCodeCached = false; // as expression is mutated } //Helper using reflection to recreate a concrete type from an abstract class (useful with reduce behavior) private static VFXExpression CreateNewInstance(Type expressionType) { var allconstructors = expressionType.GetConstructors().ToArray(); if (allconstructors.Length == 0) return null; //Only static readonly expression allowed, constructors are private (attribute or builtIn) var constructor = allconstructors .OrderBy(o => o.GetParameters().Count()) //promote simplest (or default) constructors .First(); var param = constructor.GetParameters().Select(o => { var type = o.GetType(); return type.IsValueType ? 
Activator.CreateInstance(type) : null; }).ToArray(); return (VFXExpression)constructor.Invoke(param); } private VFXExpression CreateNewInstance() { return CreateNewInstance(GetType()); } // Reduce the expression protected virtual VFXExpression Reduce(VFXExpression[] reducedParents) { if (reducedParents.Length == 0) return this; var reduced = CreateNewInstance(); reduced.Initialize(m_Flags, reducedParents); return reduced; } // Evaluate the expression protected virtual VFXExpression Evaluate(VFXExpression[] constParents) { throw new NotImplementedException(); } // Get the HLSL code snippet public virtual string GetCodeString(string[] parents) { throw new NotImplementedException(GetType().ToString()); } // Get the operands for the runtime evaluation public Operands GetOperands(VFXExpressionGraph graph) { var addOperands = additionnalOperands; if (parents.Length + addOperands.Length > 4) throw new Exception("Too much parameter for expression : " + this); var data = new Operands(-1); if (graph != null) for (int i = 0; i < parents.Length; ++i) data[i] = graph.GetFlattenedIndex(parents[i]); for (int i = 0; i < addOperands.Length; ++i) data[Operands.OperandCount - addOperands.Length + i] = addOperands[i]; return data; } public virtual IEnumerable<VFXAttributeInfo> GetNeededAttributes() { return Enumerable.Empty<VFXAttributeInfo>(); } public bool Is(Flags flag) { return (m_Flags & flag) == flag; } public bool IsAny(Flags flag) { return (m_Flags & flag) != 0; } public virtual VFXValueType valueType { get { var data = GetOperands(null); return VFXExpressionHelper.GetTypeOfOperation(operation, data[0], data[1], data[2], data[3]); } } public abstract VFXExpressionOperation operation { get; } public VFXExpression[] parents { get { return m_Parents; } } public override bool Equals(object obj) { if (ReferenceEquals(this, obj)) return true; var other = obj as VFXExpression; if (other == null) return false; if (GetType() != other.GetType()) return false; if (operation != other.operation) return false; if (valueType != other.valueType) return false; if (m_Flags != other.m_Flags) return false; if (GetHashCode() != other.GetHashCode()) return false; var operands = additionnalOperands; var otherOperands = other.additionnalOperands; if (operands.Length != otherOperands.Length) return false; for (int i = 0; i < operands.Length; ++i) if (operands[i] != otherOperands[i]) return false; var thisParents = parents; var otherParents = other.parents; if (thisParents == null && otherParents == null) return true; if (thisParents == null || otherParents == null) return false; if (thisParents.Length != otherParents.Length) return false; for (int i = 0; i < thisParents.Length; ++i) if (!thisParents[i].Equals(otherParents[i])) return false; return true; } public override sealed int GetHashCode() { if (!m_HashCodeCached) { m_HashCode = GetInnerHashCode(); m_HashCodeCached = true; } return m_HashCode; } protected virtual int GetInnerHashCode() { int hash = GetType().GetHashCode(); var parents = this.parents; for (int i = 0; i < parents.Length; ++i) hash = (hash * 397) ^ parents[i].GetHashCode(); // 397 taken from resharper var operands = additionnalOperands; for (int i = 0; i < operands.Length; ++i) hash = (hash * 397) ^ operands[i].GetHashCode(); hash = (hash * 397) ^ m_Flags.GetHashCode(); hash = (hash * 397) ^ valueType.GetHashCode(); hash = (hash * 397) ^ operation.GetHashCode(); return hash; } private static readonly int[] k_EmptyOperands = Enumerable.Empty<int>().ToArray(); protected virtual int[] additionnalOperands { 
get { return k_EmptyOperands; } } public virtual T Get<T>() { var value = (this as VFXValue<T>); if (value == null) { throw new ArgumentException(string.Format("Get isn't available for {0} with {1}", typeof(T).FullName, GetType().FullName)); } return value.Get(); } public virtual object GetContent() { throw new ArgumentException(string.Format("GetContent isn't available for {0}", GetType().FullName)); } private void PropagateParentsFlags() { if (m_Parents.Length > 0) { bool foldable = true; foreach (var parent in m_Parents) { foldable &= parent.Is(Flags.Foldable); m_Flags |= (parent.m_Flags & (Flags.NotCompilableOnCPU)); if (parent.IsAny(Flags.NotCompilableOnCPU) && parent.Is(Flags.InvalidOnGPU)) m_Flags |= Flags.InvalidOnGPU; // Only propagate GPU validity for per element expressions } if (foldable) m_Flags |= Flags.Foldable; else m_Flags &= ~Flags.Foldable; } } public static VFXExpression operator*(VFXExpression a, VFXExpression b) { return new VFXExpressionMul(a, b); } public static VFXExpression operator/(VFXExpression a, VFXExpression b) { return new VFXExpressionDivide(a, b); } public static VFXExpression operator+(VFXExpression a, VFXExpression b) { return new VFXExpressionAdd(a, b); } public static VFXExpression operator-(VFXExpression a, VFXExpression b) { return new VFXExpressionSubtract(a, b); } public static VFXExpression operator|(VFXExpression a, VFXExpression b) { return new VFXExpressionBitwiseOr(a, b); } public static VFXExpression operator&(VFXExpression a, VFXExpression b) { return new VFXExpressionBitwiseAnd(a, b); } public static VFXExpression operator|(VFXExpression a, uint b) { return new VFXExpressionBitwiseOr(a, VFXValue.Constant(b)); } public static VFXExpression operator&(VFXExpression a, uint b) { return new VFXExpressionBitwiseAnd(a, VFXValue.Constant(b)); } public static VFXExpression operator<<(VFXExpression a, int shift) { return new VFXExpressionBitwiseLeftShift(a, VFXValue.Constant((uint)shift)); } public static VFXExpression operator>>(VFXExpression a, int shift) { return new VFXExpressionBitwiseRightShift(a, VFXValue.Constant((uint)shift)); } public VFXExpression this[int index] { get { return new VFXExpressionExtractComponent(this, index); } } public VFXExpression x { get { return new VFXExpressionExtractComponent(this, 0); } } public VFXExpression y { get { return new VFXExpressionExtractComponent(this, 1); } } public VFXExpression z { get { return new VFXExpressionExtractComponent(this, 2); } } public VFXExpression w { get { return new VFXExpressionExtractComponent(this, 3); } } public VFXExpression xxx { get { return new VFXExpressionCombine(x, x, x); } } public VFXExpression yyy { get { return new VFXExpressionCombine(y, y, y); } } public VFXExpression zzz { get { return new VFXExpressionCombine(z, z, z); } } private Flags m_Flags = Flags.None; private VFXExpression[] m_Parents; private int m_HashCode; private bool m_HashCodeCached = false; } }
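// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the VFX package sources above): the operator
// overloads and swizzle properties defined on VFXExpression let graph code
// compose new expression nodes without naming the concrete node classes. The
// helper below builds a simple "scale then offset" tree; it assumes it lives in
// the same editor assembly as VFXExpression (the class is not public), and the
// class and method names are assumptions made only for this example.
// ---------------------------------------------------------------------------
namespace UnityEditor.VFX
{
    static class VFXExpressionExamples
    {
        // result = value * scale + offset, expressed as an expression tree.
        // operator* and operator+ create VFXExpressionMul / VFXExpressionAdd
        // nodes whose flags (Foldable, PerElement, ...) are propagated from
        // the parents by PropagateParentsFlags().
        public static VFXExpression ScaleAndOffset(VFXExpression value, VFXExpression scale, VFXExpression offset)
        {
            return value * scale + offset;
        }
    }
}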
//--------------------------------------------------------------------------- // // Copyright (c) Microsoft Corporation. All rights reserved. // // Description: DrawingGroup represents a collection of Drawing objects, and // can apply group-operations such as clip and opacity to it's // collections // // History: // // 2004/11/17 : timothyc - Created it. // //--------------------------------------------------------------------------- using System; using System.Windows.Threading; using MS.Win32; using System.Security; using System.Security.Permissions; using System.Windows.Markup; using System.Windows.Media; using System.Windows.Media.Imaging; using System.Windows.Media.Animation; using System.Windows.Media.Composition; using System.Windows.Media.Effects; using System.Diagnostics; using System.Collections; using System.Collections.Generic; using MS.Internal; using MS.Internal.Media; using System.Resources; using MS.Utility; using System.Runtime.InteropServices; using MS.Internal.PresentationCore; using System.ComponentModel.Design.Serialization; using System.ComponentModel; using SR = MS.Internal.PresentationCore.SR; using SRID = MS.Internal.PresentationCore.SRID; namespace System.Windows.Media { /// <summary> /// DrawingGroup represents a collection of Drawing objects, and /// can apply group-operations such as clip and opacity to it's /// collections. /// </summary> [ContentProperty("Children")] public sealed partial class DrawingGroup : Drawing { #region Constructors /// <summary> /// Default DrawingGroup constructor. /// Constructs an object with all properties set to their default values. /// </summary> public DrawingGroup() { } #endregion Constructors #region Public methods /// <summary> /// Opens the DrawingGroup for re-populating it's children, clearing any existing /// children. /// </summary> /// <returns> /// Returns DrawingContext to populate the DrawingGroup's children. /// </returns> public DrawingContext Open() { VerifyOpen(); _openedForAppend = false; return new DrawingGroupDrawingContext(this); } /// <summary> /// Opens the DrawingGroup for populating it's children, appending to /// any existing children in the collection. /// </summary> /// <returns> /// Returns DrawingContext to populate the DrawingGroup's children. /// </returns> public DrawingContext Append() { VerifyOpen(); _openedForAppend = true; return new DrawingGroupDrawingContext(this); } #endregion Public methods #region Internal methods /// <summary> /// Called by a DrawingContext returned from Open or Append when the content /// created by it needs to be committed (because DrawingContext.Close/Dispose /// was called) /// </summary> /// <param name="rootDrawingGroupChildren"> /// Collection containing the Drawing elements created by a DrawingContext /// returned from Open or Append. /// </param> internal void Close(DrawingCollection rootDrawingGroupChildren) { WritePreamble(); Debug.Assert(_open); Debug.Assert(rootDrawingGroupChildren != null); if (!_openedForAppend) { // Clear out the previous contents by replacing the current collection with // the new collection. // // When more than one element exists in rootDrawingGroupChildren, the // DrawingContext had to create this new collection anyways. To behave // consistently between the one-element and many-element cases, // we always set Children to a new DrawingCollection instance during Close(). 
// // Doing this also avoids having to protect against exceptions being thrown // from user-code, which could be executed if a Changed event was fired when // we tried to add elements to a pre-existing collection. // // The collection created by the DrawingContext will no longer be // used after the DrawingContext is closed, so we can take ownership // of the reference here to avoid any more unneccesary copies. Children = rootDrawingGroupChildren; } else { // // // Append the collection to the current Children collection // // DrawingCollection children = Children; // // Ensure that we can Append to the Children collection // if (children == null) { throw new InvalidOperationException(SR.Get(SRID.DrawingGroup_CannotAppendToNullCollection)); } if (children.IsFrozen) { throw new InvalidOperationException(SR.Get(SRID.DrawingGroup_CannotAppendToFrozenCollection)); } // Append the new collection to our current Children. // // TransactionalAppend rolls-back the Append operation in the event // an exception is thrown from the Changed event. children.TransactionalAppend(rootDrawingGroupChildren); } // This DrawingGroup is no longer open _open = false; } /// <summary> /// Calls methods on the DrawingContext that are equivalent to the /// Drawing with the Drawing's current value. /// </summary> internal override void WalkCurrentValue(DrawingContextWalker ctx) { int popCount = 0; // We avoid unneccessary ShouldStopWalking checks based on assumptions // about when ShouldStopWalking is set. Guard that assumption with an // assertion. // // ShouldStopWalking is currently only set during a hit-test walk after // an object has been hit. Because a DrawingGroup can't be hit until after // the first Drawing is tested, this method doesn't check ShouldStopWalking // until after the first child. // // We don't need to add this check to other Drawing subclasses for // the same reason -- if the Drawing being tested isn't a DrawingGroup, // they are always the 'first child'. // // If this assumption is ever broken then the ShouldStopWalking // check should be done on the first child -- including in the // WalkCurrentValue method of other Drawing subclasses. Debug.Assert(!ctx.ShouldStopWalking); // // Draw the transform property // // Avoid calling PushTransform if the base value is set to the default and // no animations have been set on the property. if (!IsBaseValueDefault(DrawingGroup.TransformProperty) || (null != AnimationStorage.GetStorage(this, DrawingGroup.TransformProperty))) { ctx.PushTransform(Transform); popCount++; } // // Draw the clip property // // Avoid calling PushClip if the base value is set to the default and // no animations have been set on the property. if (!IsBaseValueDefault(DrawingGroup.ClipGeometryProperty) || (null != AnimationStorage.GetStorage(this, DrawingGroup.ClipGeometryProperty))) { ctx.PushClip(ClipGeometry); popCount++; } // // Draw the opacity property // // Avoid calling PushOpacity if the base value is set to the default and // no animations have been set on the property. if (!IsBaseValueDefault(DrawingGroup.OpacityProperty) || (null != AnimationStorage.GetStorage(this, DrawingGroup.OpacityProperty))) { // Push the current value of the opacity property, which // is what Opacity returns. ctx.PushOpacity(Opacity); popCount++; } // Draw the opacity mask property // if (OpacityMask != null) { ctx.PushOpacityMask(OpacityMask); popCount++; } // // Draw the effect property // // Push the current value of the effect property, which // is what BitmapEffect returns. 
if (BitmapEffect != null) { // Disable warning about obsolete method. This code must remain active // until we can remove the public BitmapEffect APIs. #pragma warning disable 0618 ctx.PushEffect(BitmapEffect, BitmapEffectInput); #pragma warning restore 0618 popCount++; } // // Draw the Children collection // // Get the current value of the children collection DrawingCollection collection = Children; // Call Walk on each child if (collection != null) { for (int i = 0; i < collection.Count; i++) { Drawing drawing = collection.Internal_GetItem(i); if (drawing != null) { drawing.WalkCurrentValue(ctx); // Don't visit the remaining children if the previous // child caused us to stop walking. if (ctx.ShouldStopWalking) { break; } } } } // // Call Pop() for every Push // // Avoid placing this logic in a finally block because if an exception is // thrown, the Walk is simply aborted. There is no requirement to Walk // through Pop instructions when an exception is thrown. // for (int i = 0; i < popCount; i++) { ctx.Pop(); } } #endregion Internal methods #region Private Methods /// <summary> /// Called by both Open() and Append(), this method verifies the /// DrawingGroup isn't already open, and set's the open flag. /// </summary> private void VerifyOpen() { WritePreamble(); // Throw an exception if we are already opened if (_open) { throw new InvalidOperationException(SR.Get(SRID.DrawingGroup_AlreadyOpen)); } _open = true; } #endregion Private Methods #region Private fields private bool _openedForAppend; private bool _open; #endregion Private fields } }
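//---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original DrawingGroup source):
// shows how calling code typically populates a DrawingGroup through Open()
// and Append(). The brush, pen and geometry values are arbitrary, and
// DrawingGroupUsageSketch is a hypothetical helper introduced only for this
// example.
//---------------------------------------------------------------------------
using System.Windows;
using System.Windows.Media;

internal static class DrawingGroupUsageSketch
{
    internal static DrawingGroup BuildGroup()
    {
        DrawingGroup group = new DrawingGroup();

        // Open() clears any existing children; disposing the returned
        // DrawingContext commits the new content back into the group.
        using (DrawingContext dc = group.Open())
        {
            dc.DrawRectangle(Brushes.CornflowerBlue, null, new Rect(0, 0, 100, 50));
        }

        // Append() keeps the existing children and adds the new drawing
        // commands after them.
        using (DrawingContext dc = group.Append())
        {
            dc.DrawEllipse(null, new Pen(Brushes.Black, 1.0), new Point(50, 25), 20, 10);
        }

        return group;
    }
}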
using System; using System.ComponentModel; using System.Drawing; using System.Drawing.Design; using System.Drawing.Drawing2D; using System.Reflection; using System.Windows.Forms; using System.Windows.Forms.Design; namespace Design { /// <summary> /// Class extending the <see cref="ColorEditor"/> which adds the /// capability to also change the alpha value of the color. /// </summary> public class ColorEditorEx : ColorEditor { #region Class ColorUIWrapper /// <summary> /// Wrapper for the private ColorUI class nested within <see cref="ColorEditor"/>. /// It publishes its internals via reflection and adds a <see cref="TrackBar"/> to /// adjust teh alpha value. /// </summary> public class ColorUIWrapper { #region Fields private Control _control; private MethodInfo _startMethodInfo; private MethodInfo _endMethodInfo; private PropertyInfo _valuePropertyInfo; private TrackBar _tbAlpha; private Label _lblAlpha; private IWindowsFormsEditorService _service; #endregion #region Constructors /// <summary> /// Creates a new instance. /// </summary> /// <param name="colorEditor">The editor this instance belongs to.</param> public ColorUIWrapper(ColorEditorEx colorEditor, IWindowsFormsEditorService service) { _service = service; Type colorUiType = typeof(ColorEditor).GetNestedType("ColorUI", BindingFlags.CreateInstance | BindingFlags.NonPublic); ConstructorInfo constructorInfo = colorUiType.GetConstructor(new Type[] { typeof(ColorEditor) }); _control = (Control)constructorInfo.Invoke(new object[] { colorEditor }); Panel alphaPanel = new Panel(); alphaPanel.BackColor = SystemColors.Control; alphaPanel.Dock = DockStyle.Right; alphaPanel.Width = 28; _control.Controls.Add(alphaPanel); _tbAlpha = new TrackBar(); _tbAlpha.Orientation = Orientation.Vertical; _tbAlpha.Dock = DockStyle.Fill; _tbAlpha.TickStyle = TickStyle.None; _tbAlpha.Maximum = byte.MaxValue; _tbAlpha.Minimum = byte.MinValue; _tbAlpha.ValueChanged += new EventHandler(OnTrackBarAlphaValueChanged); alphaPanel.Controls.Add(_tbAlpha); _lblAlpha = new Label(); _lblAlpha.Text = "0"; _lblAlpha.Dock = DockStyle.Bottom; _lblAlpha.TextAlign = ContentAlignment.MiddleCenter; _lblAlpha.Click += new EventHandler(OnAlphaClick); alphaPanel.Controls.Add(_lblAlpha); _startMethodInfo = _control.GetType().GetMethod("Start"); _endMethodInfo = _control.GetType().GetMethod("End"); _valuePropertyInfo = _control.GetType().GetProperty("Value"); _control.Layout += new LayoutEventHandler(_control_Layout); TabControl tabControl = (TabControl)_control.Controls[0]; tabControl.SelectedIndexChanged += new EventHandler(tabControl_SelectedIndexChanged); _control.BackColor = _control.BackColor; //to update color of the tabs (xp style bug) //tabControl.Appearance = TabAppearance.FlatButtons; //tabControl.Alignment = TabAlignment.Bottom; } #endregion #region Public interface /// <summary> /// The control to be shown when a color is edited. /// The concrete type is ColorUI which is privately hidden /// within System.Drawing.Design. /// </summary> public Control Control { get { return _control; } } /// <summary> /// Gets the edited color with applied alpha value. /// </summary> public object Value { get { object result = _valuePropertyInfo.GetValue(_control, new object[0]); if (result is Color) { Color clr = (Color)result; if (clr.IsSystemColor || _tbAlpha.Value == 255 || clr == Color.Transparent) return clr; result = Color.FromArgb(_tbAlpha.Value, clr); } return result; } } /// <summary> /// Starts the editing process. 
/// </summary> /// <param name="service">The editor service.</param> /// <param name="value">The value to be edited.</param> public void Start(IWindowsFormsEditorService service, object value) { if (value is Color) { Color clr = (Color)value; _tbAlpha.Value = clr.A; if (!clr.IsKnownColor && clr.A < 255) { clr = Color.FromArgb(clr.R, clr.G, clr.B); foreach (KnownColor kc in Enum.GetValues(typeof(KnownColor))) { Color kcc = Color.FromKnownColor(kc); if (kcc.IsSystemColor || kc == KnownColor.Transparent) continue; if (kcc.R == clr.R && kcc.G == clr.G && kcc.B == clr.B) { clr = kcc; break; } } value = clr; } } _startMethodInfo.Invoke(_control, new object[] { service, value }); } /// <summary> /// End the editing process. /// </summary> public void End() { _endMethodInfo.Invoke(_control, new object[0]); } #endregion #region Privates private void _control_Layout(object sender, LayoutEventArgs e) { // resize control to fit alpha panel TabControl tabControl = (TabControl)_control.Controls[0]; Size size = tabControl.TabPages[0].Controls[0].Size; size += tabControl.Size - tabControl.SelectedTab.Size; _control.ClientSize = new Size(_tbAlpha.Width + size.Width, size.Height); } private void OnTrackBarAlphaValueChanged(object sender, EventArgs e) { _lblAlpha.Text = _tbAlpha.Value.ToString(); } private void OnAlphaClick(object sender, EventArgs e) { _service.CloseDropDown(); } private void tabControl_SelectedIndexChanged(object sender, EventArgs e) { // hide alpha panel when selected System colors tab _tbAlpha.Parent.Visible = (((TabControl)sender).SelectedIndex < 2); } #endregion } #endregion #region Fields private ColorUIWrapper _colorUI; #endregion #region Constructors /// <summary> /// Creates a new instance. /// </summary> public ColorEditorEx() { } #endregion #region Overridden from ColorEditor /// <summary> /// Edits the given value. 
/// </summary> /// <param name="context">Context infromation.</param> /// <param name="provider">Service provider.</param> /// <param name="value">Value to be edited.</param> /// <returns>An edited value.</returns> public override object EditValue(ITypeDescriptorContext context, IServiceProvider provider, object value) { if (provider != null) { IWindowsFormsEditorService service = (IWindowsFormsEditorService)provider.GetService(typeof(IWindowsFormsEditorService)); if (service == null) return value; if (_colorUI == null) _colorUI = new ColorUIWrapper(this, service); _colorUI.Start(service, value); service.DropDownControl(_colorUI.Control); if ((_colorUI.Value != null) && (((Color)_colorUI.Value) != Color.Empty)) { value = _colorUI.Value; } _colorUI.End(); } return value; } public override void PaintValue(PaintValueEventArgs e) { if (e.Value is Color && ((Color)e.Value).A < byte.MaxValue) { SmoothingMode oldMode = e.Graphics.SmoothingMode; e.Graphics.SmoothingMode = SmoothingMode.AntiAlias; if (Color.Transparent == (Color)e.Value) { // Paint cross Rectangle r = Rectangle.Inflate(e.Bounds, -1, -1); e.Graphics.DrawLine(Pens.Black, r.Left, r.Top, r.Right - 1, r.Bottom - 1); e.Graphics.DrawLine(Pens.Black, r.Right - 1, r.Top, r.Left, r.Bottom - 1); } else { // Paint diamond RectangleF r = e.Bounds; r.Width--; r.Height--; e.Graphics.FillRectangle(Brushes.White, r); PointF[] diamond = { new PointF(r.Left + r.Width/2, r.Top), new PointF(r.Right, r.Top + r.Height/2), new PointF(r.Left + r.Width/2, r.Bottom), new PointF(r.Left, r.Top + r.Height/2) }; e.Graphics.FillPolygon(Brushes.Black, diamond); // int oneThird = e.Bounds.Width / 3; // e.Graphics.FillRectangle(Brushes.White, new Rectangle(e.Bounds.X, e.Bounds.Y, oneThird, e.Bounds.Height - 1)); // e.Graphics.FillRectangle(Brushes.DarkGray, new Rectangle(e.Bounds.X + oneThird, e.Bounds.Y, oneThird, e.Bounds.Height - 1)); // e.Graphics.FillRectangle(Brushes.Black, new Rectangle(e.Bounds.X + oneThird * 2, e.Bounds.Y, e.Bounds.Width - oneThird * 2, e.Bounds.Height - 1)); } e.Graphics.SmoothingMode = oldMode; } base.PaintValue(e); } #endregion } }
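//---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original file): shows how the
// extended editor would typically be wired up so that a designer's property
// grid uses it for a Color property. SampleShape is a hypothetical class
// introduced only for this example.
//---------------------------------------------------------------------------
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Design;

namespace Design
{
    public class SampleShape
    {
        private Color _fillColor = Color.FromArgb(128, Color.SteelBlue);

        // UITypeEditor-derived editors are attached via EditorAttribute; the
        // property grid then shows ColorEditorEx's drop-down, including the
        // extra alpha track bar.
        [Editor(typeof(ColorEditorEx), typeof(UITypeEditor))]
        public Color FillColor
        {
            get { return _fillColor; }
            set { _fillColor = value; }
        }
    }
}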
using System; using System.Data; using System.Data.SqlClient; using Rainbow.Framework.Settings; namespace Rainbow.Framework.Content.Data { /// <summary> /// IBS Portal Picture module /// (c)2002 by Ender Malkoc /// </summary> public class PicturesDB { /// <summary> /// The AddPicture function is used to ADD Pictures to the Database /// </summary> /// <param name="moduleID">The module ID.</param> /// <param name="itemID">The item ID.</param> /// <param name="displayOrder">The display order.</param> /// <param name="metadataXml">The metadata XML.</param> /// <param name="shortDescription">The short description.</param> /// <param name="keywords">The keywords.</param> /// <param name="CreatedByUser">The created by user.</param> /// <param name="CreatedDate">The created date.</param> /// <returns></returns> public int AddPicture(int moduleID, int itemID, int displayOrder, string metadataXml, string shortDescription, string keywords, string CreatedByUser, DateTime CreatedDate) { // Create Instance of Connection and Command Object SqlConnection myConnection = Config.SqlConnectionString; SqlCommand myCommand = new SqlCommand("rb_AddPicture", myConnection); myCommand.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC SqlParameter parameterItemID = new SqlParameter("@ItemID", SqlDbType.Int, 4); parameterItemID.Direction = ParameterDirection.Output; myCommand.Parameters.Add(parameterItemID); SqlParameter parameterModuleID = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleID.Value = moduleID; myCommand.Parameters.Add(parameterModuleID); SqlParameter parameterDisplayOrder = new SqlParameter("@DisplayOrder", SqlDbType.Int, 4); parameterDisplayOrder.Value = displayOrder; myCommand.Parameters.Add(parameterDisplayOrder); SqlParameter parameterMetadataXml = new SqlParameter("@MetadataXml", SqlDbType.NVarChar); parameterMetadataXml.Value = metadataXml; myCommand.Parameters.Add(parameterMetadataXml); SqlParameter parameterShortDescription = new SqlParameter("@ShortDescription", SqlDbType.NVarChar); parameterShortDescription.Value = shortDescription; myCommand.Parameters.Add(parameterShortDescription); SqlParameter parameterKeywords = new SqlParameter("@Keywords", SqlDbType.NVarChar); parameterKeywords.Value = keywords; myCommand.Parameters.Add(parameterKeywords); SqlParameter parameterCreatedByUser = new SqlParameter("@CreatedByUser", SqlDbType.NVarChar, 100); parameterCreatedByUser.Value = CreatedByUser; myCommand.Parameters.Add(parameterCreatedByUser); SqlParameter parameterCreatedDate = new SqlParameter("@CreatedDate", SqlDbType.DateTime); parameterCreatedDate.Value = CreatedDate; myCommand.Parameters.Add(parameterCreatedDate); myConnection.Open(); try { myCommand.ExecuteNonQuery(); } finally { myConnection.Close(); } return Convert.ToInt32(parameterItemID.Value); } /// <summary> /// The GetPicture function is used to get all the Pictures in the module /// </summary> /// <param name="moduleID">The module ID.</param> /// <param name="page">The page.</param> /// <param name="recordsPerPage">The records per page.</param> /// <param name="version">The version.</param> /// <returns></returns> public DataSet GetPicturesPaged(int moduleID, int page, int recordsPerPage, WorkFlowVersion version) { // Create Instance of Connection and Command Object SqlConnection myConnection = Config.SqlConnectionString; SqlDataAdapter myCommand = new SqlDataAdapter("rb_GetPicturesPaged", myConnection); myCommand.SelectCommand.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC 
SqlParameter parameterModuleID = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleID.Value = moduleID; myCommand.SelectCommand.Parameters.Add(parameterModuleID); SqlParameter parameterPage = new SqlParameter("@Page", SqlDbType.Int, 4); parameterPage.Value = page; myCommand.SelectCommand.Parameters.Add(parameterPage); SqlParameter parameterRecordsPerPage = new SqlParameter("@RecordsPerPage", SqlDbType.Int, 4); parameterRecordsPerPage.Value = recordsPerPage; myCommand.SelectCommand.Parameters.Add(parameterRecordsPerPage); SqlParameter parameterWorkflowVersion = new SqlParameter("@WorkflowVersion", SqlDbType.Int, 4); parameterWorkflowVersion.Value = (int) version; myCommand.SelectCommand.Parameters.Add(parameterWorkflowVersion); // Create and Fill the DataSet DataSet myDataSet = new DataSet(); try { myCommand.Fill(myDataSet); } finally { myConnection.Close(); //by Manu fix close bug #2 } // return the DataSet return myDataSet; } /// <summary> /// The GetSinglePicture function is used to Get a single Picture /// from the database for display/edit /// </summary> /// <param name="itemID">The item ID.</param> /// <param name="version">The version.</param> /// <returns></returns> public SqlDataReader GetSinglePicture(int itemID, WorkFlowVersion version) { // Create Instance of Connection and Command Object SqlConnection myConnection = Config.SqlConnectionString; SqlCommand myCommand = new SqlCommand("rb_GetSinglePicture", myConnection); myCommand.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC SqlParameter parameterItemID = new SqlParameter("@ItemID", SqlDbType.Int, 4); parameterItemID.Value = itemID; myCommand.Parameters.Add(parameterItemID); SqlParameter parameterWorkflowVersion = new SqlParameter("@WorkflowVersion", SqlDbType.Int, 4); parameterWorkflowVersion.Value = (int) version; myCommand.Parameters.Add(parameterWorkflowVersion); // Execute the command myConnection.Open(); SqlDataReader result = myCommand.ExecuteReader(CommandBehavior.CloseConnection); // return the datareader return result; } /// <summary> /// The DeletePicture function is used to remove Pictures from the Database /// </summary> /// <param name="itemID">The item ID.</param> public void DeletePicture(int itemID) { // Create Instance of Connection and Command Object SqlConnection myConnection = Config.SqlConnectionString; SqlCommand myCommand = new SqlCommand("rb_DeletePicture", myConnection); myCommand.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC SqlParameter parameterItemID = new SqlParameter("@ItemID", SqlDbType.Int, 4); parameterItemID.Value = itemID; myCommand.Parameters.Add(parameterItemID); // Execute the command myConnection.Open(); try { myCommand.ExecuteNonQuery(); } finally { myConnection.Close(); } } /// <summary> /// The UpdatePicture function is used to update changes to the Pictures /// </summary> /// <param name="moduleID">The module ID.</param> /// <param name="itemID">The item ID.</param> /// <param name="displayOrder">The display order.</param> /// <param name="metadataXml">The metadata XML.</param> /// <param name="shortDescription">The short description.</param> /// <param name="keywords">The keywords.</param> /// <param name="CreatedByUser">The created by user.</param> /// <param name="CreatedDate">The created date.</param> public void UpdatePicture(int moduleID, int itemID, int displayOrder, string metadataXml, string shortDescription, string keywords, string CreatedByUser, DateTime CreatedDate) { // Create Instance of Connection and Command 
            SqlConnection myConnection = Config.SqlConnectionString;
            SqlCommand myCommand = new SqlCommand("rb_UpdatePicture", myConnection);
            myCommand.CommandType = CommandType.StoredProcedure;

            // Add Parameters to SPROC
            SqlParameter parameterItemID = new SqlParameter("@ItemID", SqlDbType.Int, 4);
            parameterItemID.Value = itemID;
            myCommand.Parameters.Add(parameterItemID);

            SqlParameter parameterModuleID = new SqlParameter("@ModuleID", SqlDbType.Int, 4);
            parameterModuleID.Value = moduleID;
            myCommand.Parameters.Add(parameterModuleID);

            SqlParameter parameterDisplayOrder = new SqlParameter("@DisplayOrder", SqlDbType.Int, 4);
            parameterDisplayOrder.Value = displayOrder;
            myCommand.Parameters.Add(parameterDisplayOrder);

            SqlParameter parameterMetadataXml = new SqlParameter("@MetadataXml", SqlDbType.NVarChar);
            parameterMetadataXml.Value = metadataXml;
            myCommand.Parameters.Add(parameterMetadataXml);

            SqlParameter parameterShortDescription = new SqlParameter("@ShortDescription", SqlDbType.NVarChar);
            parameterShortDescription.Value = shortDescription;
            myCommand.Parameters.Add(parameterShortDescription);

            SqlParameter parameterKeywords = new SqlParameter("@Keywords", SqlDbType.NVarChar);
            parameterKeywords.Value = keywords;
            myCommand.Parameters.Add(parameterKeywords);

            SqlParameter parameterCreatedByUser = new SqlParameter("@CreatedByUser", SqlDbType.NVarChar, 100);
            parameterCreatedByUser.Value = CreatedByUser;
            myCommand.Parameters.Add(parameterCreatedByUser);

            SqlParameter parameterCreatedDate = new SqlParameter("@CreatedDate", SqlDbType.DateTime);
            parameterCreatedDate.Value = CreatedDate;
            myCommand.Parameters.Add(parameterCreatedDate);

            // Execute the command
            myConnection.Open();
            try
            {
                myCommand.ExecuteNonQuery();
            }
            finally
            {
                myConnection.Close();
            }
        }
    }
}
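//---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original file): a typical call
// sequence against PicturesDB. The module ID, metadata and user values are
// placeholders, and default(WorkFlowVersion) merely stands in for whichever
// workflow version the caller actually wants to read.
//---------------------------------------------------------------------------
using System;
using System.Data;

namespace Rainbow.Framework.Content.Data
{
    internal class PicturesDbUsageSketch
    {
        public DataSet AddAndList(int moduleId)
        {
            PicturesDB db = new PicturesDB();

            // Insert a picture row; the stored procedure returns the new ItemID
            // through the @ItemID output parameter.
            int itemId = db.AddPicture(
                moduleId,
                0,                  // itemID is not used on insert
                1,                  // display order
                "<metadata />",     // metadata XML
                "Sample picture",   // short description
                "sample;demo",      // keywords
                "admin",            // created by user
                DateTime.Now);      // created date

            // Read back the first page (10 records) of pictures for the module.
            return db.GetPicturesPaged(moduleId, 1, 10, default(WorkFlowVersion));
        }
    }
}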
#region Using Statements using System; using System.Collections.Generic; using System.Text; using Microsoft.Xna.Framework; using JigLibX.Geometry; using JigLibX.Math; #endregion namespace JigLibX.Collision { /// <summary> /// Class CollDetectCapsulePlane /// </summary> public class CollDetectCapsulePlane : DetectFunctor { /// <summary> /// DetectFunctor for CapsulePlane collison detection. /// </summary> public CollDetectCapsulePlane() : base("CapsulePlane", (int)PrimitiveType.Capsule, (int)PrimitiveType.Plane) { } #if WINDOWS_PHONE /// <summary> /// CollDetect /// </summary> /// <param name="info"></param> /// <param name="collTolerance"></param> /// <param name="collisionFunctor"></param> public override void CollDetect(CollDetectInfo info, float collTolerance, CollisionFunctor collisionFunctor) { if (info.Skin0.GetPrimitiveOldWorld(info.IndexPrim0).Type == this.Type1) { CollisionSkin skinSwap = info.Skin0; info.Skin0 = info.Skin1; info.Skin1 = skinSwap; int primSwap = info.IndexPrim0; info.IndexPrim0 = info.IndexPrim1; info.IndexPrim1 = primSwap; } Vector3 body0Pos = (info.Skin0.Owner != null) ? info.Skin0.Owner.OldPosition : Vector3.Zero; Vector3 body1Pos = (info.Skin1.Owner != null) ? info.Skin1.Owner.OldPosition : Vector3.Zero; // todo - proper swept test Capsule oldCapsule = (Capsule)info.Skin0.GetPrimitiveOldWorld(info.IndexPrim0); Capsule newCapsule = (Capsule)info.Skin0.GetPrimitiveNewWorld(info.IndexPrim0); JigLibX.Geometry.Plane oldPlane = (JigLibX.Geometry.Plane)info.Skin1.GetPrimitiveOldWorld(info.IndexPrim1); JigLibX.Geometry.Plane newPlane = (JigLibX.Geometry.Plane)info.Skin1.GetPrimitiveNewWorld(info.IndexPrim1); Matrix newPlaneInvTransform = newPlane.InverseTransformMatrix; Matrix oldPlaneInvTransform = oldPlane.InverseTransformMatrix; SmallCollPointInfo[] collPtArray = SCPIStackAlloc(); { int numCollPts = 0; // the start { Vector3 oldCapsuleStartPos = Vector3.Transform(oldCapsule.Position, oldPlaneInvTransform); Vector3 newCapsuleStartPos = Vector3.Transform(newCapsule.Position, newPlaneInvTransform); float oldDist = Distance.PointPlaneDistance(oldCapsuleStartPos, oldPlane); float newDist = Distance.PointPlaneDistance(newCapsuleStartPos, newPlane); if (MathHelper.Min(newDist, oldDist) < collTolerance + newCapsule.Radius) { float oldDepth = oldCapsule.Radius - oldDist; // calc the world position based on the old position8(s) Vector3 worldPos = oldCapsule.Position - oldCapsule.Radius * oldPlane.Normal; // BEN-OPTIMISATION: Now reuses existing collPts instead of reallocating. collPtArray[numCollPts].R0 = worldPos - body0Pos; collPtArray[numCollPts].R1 = worldPos - body1Pos; collPtArray[numCollPts++].InitialPenetration = oldDepth; } } // the end { Vector3 oldCapsuleEndPos = Vector3.Transform(oldCapsule.GetEnd(), oldPlaneInvTransform); Vector3 newCapsuleEndPos = Vector3.Transform(newCapsule.GetEnd(), newPlaneInvTransform); float oldDist = Distance.PointPlaneDistance(oldCapsuleEndPos, oldPlane); float newDist = Distance.PointPlaneDistance(newCapsuleEndPos, newPlane); if (System.Math.Min(newDist, oldDist) < collTolerance + newCapsule.Radius) { float oldDepth = oldCapsule.Radius - oldDist; // calc the world position based on the old position(s) Vector3 worldPos = oldCapsule.GetEnd() - oldCapsule.Radius * oldPlane.Normal; // BEN-OPTIMISATION: Now reuses existing collPts instead of reallocating. 
collPtArray[numCollPts].R0 = worldPos - body0Pos; collPtArray[numCollPts].R1 = worldPos - body1Pos; collPtArray[numCollPts++].InitialPenetration = oldDepth; } if (numCollPts > 0) { collisionFunctor.CollisionNotify(ref info, ref oldPlane.normal, collPtArray, numCollPts); } } FreeStackAlloc(collPtArray); } } #else /// <summary> /// CollDetect /// </summary> /// <param name="info"></param> /// <param name="collTolerance"></param> /// <param name="collisionFunctor"></param> public override void CollDetect(CollDetectInfo info, float collTolerance, CollisionFunctor collisionFunctor) { if (info.Skin0.GetPrimitiveOldWorld(info.IndexPrim0).Type == this.Type1) { CollisionSkin skinSwap = info.Skin0; info.Skin0 = info.Skin1; info.Skin1 = skinSwap; int primSwap = info.IndexPrim0; info.IndexPrim0 = info.IndexPrim1; info.IndexPrim1 = primSwap; } Vector3 body0Pos = (info.Skin0.Owner != null) ? info.Skin0.Owner.OldPosition : Vector3.Zero; Vector3 body1Pos = (info.Skin1.Owner != null) ? info.Skin1.Owner.OldPosition : Vector3.Zero; // todo - proper swept test Capsule oldCapsule = (Capsule)info.Skin0.GetPrimitiveOldWorld(info.IndexPrim0); Capsule newCapsule = (Capsule)info.Skin0.GetPrimitiveNewWorld(info.IndexPrim0); JigLibX.Geometry.Plane oldPlane = (JigLibX.Geometry.Plane)info.Skin1.GetPrimitiveOldWorld(info.IndexPrim1); JigLibX.Geometry.Plane newPlane = (JigLibX.Geometry.Plane)info.Skin1.GetPrimitiveNewWorld(info.IndexPrim1); Matrix newPlaneInvTransform = newPlane.InverseTransformMatrix; Matrix oldPlaneInvTransform = oldPlane.InverseTransformMatrix; unsafe { #if USE_STACKALLOC SmallCollPointInfo* collPts = stackalloc SmallCollPointInfo[MaxLocalStackSCPI]; #else SmallCollPointInfo[] collPtArray = SCPIStackAlloc(); fixed (SmallCollPointInfo* collPts = collPtArray) #endif { int numCollPts = 0; // the start { Vector3 oldCapsuleStartPos = Vector3.Transform(oldCapsule.Position, oldPlaneInvTransform); Vector3 newCapsuleStartPos = Vector3.Transform(newCapsule.Position, newPlaneInvTransform); float oldDist = Distance.PointPlaneDistance(oldCapsuleStartPos, oldPlane); float newDist = Distance.PointPlaneDistance(newCapsuleStartPos, newPlane); if (MathHelper.Min(newDist, oldDist) < collTolerance + newCapsule.Radius) { float oldDepth = oldCapsule.Radius - oldDist; // calc the world position based on the old position8(s) Vector3 worldPos = oldCapsule.Position - oldCapsule.Radius * oldPlane.Normal; // BEN-OPTIMISATION: Now reuses existing collPts instead of reallocating. collPts[numCollPts].R0 = worldPos - body0Pos; collPts[numCollPts].R1 = worldPos - body1Pos; collPts[numCollPts++].InitialPenetration = oldDepth; } } // the end { Vector3 oldCapsuleEndPos = Vector3.Transform(oldCapsule.GetEnd(), oldPlaneInvTransform); Vector3 newCapsuleEndPos = Vector3.Transform(newCapsule.GetEnd(), newPlaneInvTransform); float oldDist = Distance.PointPlaneDistance(oldCapsuleEndPos, oldPlane); float newDist = Distance.PointPlaneDistance(newCapsuleEndPos, newPlane); if (System.Math.Min(newDist, oldDist) < collTolerance + newCapsule.Radius) { float oldDepth = oldCapsule.Radius - oldDist; // calc the world position based on the old position(s) Vector3 worldPos = oldCapsule.GetEnd() - oldCapsule.Radius * oldPlane.Normal; // BEN-OPTIMISATION: Now reuses existing collPts instead of reallocating. 
collPts[numCollPts].R0 = worldPos - body0Pos; collPts[numCollPts].R1 = worldPos - body1Pos; collPts[numCollPts++].InitialPenetration = oldDepth; } if (numCollPts > 0) { collisionFunctor.CollisionNotify(ref info, ref oldPlane.normal, collPts, numCollPts); } } } #if !USE_STACKALLOC FreeStackAlloc(collPtArray); #endif } } #endif } }
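//---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original file): shows how a
// detect functor such as CollDetectCapsulePlane is typically made available
// to the physics world. RegisterCollDetectFunctor is assumed here to be the
// registration entry point on CollisionSystem; the stock JigLibX collision
// systems already register the built-in functors themselves, so a manual call
// like this would only be needed for a custom setup.
//---------------------------------------------------------------------------
using JigLibX.Collision;
using JigLibX.Physics;

internal static class CapsulePlaneRegistrationSketch
{
    internal static void Register(PhysicsSystem physics)
    {
        // A single functor instance serves every Capsule/Plane primitive pair.
        DetectFunctor capsulePlane = new CollDetectCapsulePlane();
        physics.CollisionSystem.RegisterCollDetectFunctor(capsulePlane);
    }
}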
// SNMP message extension class. // Copyright (C) 2008-2010 Malcolm Crowe, Lex Li, and other contributors. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this // software and associated documentation files (the "Software"), to deal in the Software // without restriction, including without limitation the rights to use, copy, modify, merge, // publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons // to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or // substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, // INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR // PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE // FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Net; using System.Net.Sockets; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; using Lextm.SharpSnmpLib.Security; namespace Lextm.SharpSnmpLib.Messaging { /// <summary> /// Extension methods for <see cref="ISnmpMessage"/>. /// </summary> public static class SnmpMessageExtension { /// <summary> /// Gets the <see cref="SnmpType"/>. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> /// <returns></returns> public static SnmpType TypeCode(this ISnmpMessage message) { if (message == null) { throw new ArgumentNullException("message"); } return message.Pdu().TypeCode; } /// <summary> /// Variables. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> public static IList<Variable> Variables(this ISnmpMessage message) { if (message == null) { throw new ArgumentNullException("message"); } var code = message.TypeCode(); return code == SnmpType.Unknown ? new List<Variable>(0) : message.Scope.Pdu.Variables; } /// <summary> /// Request ID. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> public static int RequestId(this ISnmpMessage message) { if (message == null) { throw new ArgumentNullException("message"); } return message.Scope.Pdu.RequestId.ToInt32(); } /// <summary> /// Gets the message ID. /// </summary> /// <value>The message ID.</value> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> /// <remarks>For v3, message ID is different from request ID. For v1 and v2c, they are the same.</remarks> public static int MessageId(this ISnmpMessage message) { if (message == null) { throw new ArgumentNullException("message"); } return message.Header == Header.Empty ? message.RequestId() : message.Header.MessageId; } /// <summary> /// PDU. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Pdu")] public static ISnmpPdu Pdu(this ISnmpMessage message) { if (message == null) { throw new ArgumentNullException("message"); } return message.Scope.Pdu; } /// <summary> /// Community name. 
/// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> public static OctetString Community(this ISnmpMessage message) { if (message == null) { throw new ArgumentNullException("message"); } return message.Parameters.UserName; } /// <summary> /// Sends an <see cref="ISnmpMessage"/>. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> /// <param name="manager">Manager</param> public static void Send(this ISnmpMessage message, EndPoint manager) { if (message == null) { throw new ArgumentNullException("message"); } if (manager == null) { throw new ArgumentNullException("manager"); } var code = message.TypeCode(); if ((code != SnmpType.TrapV1Pdu && code != SnmpType.TrapV2Pdu) && code != SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format( CultureInfo.InvariantCulture, "not a trap message: {0}", code)); } using (var socket = manager.GetSocket()) { message.Send(manager, socket); } } /// <summary> /// Sends an <see cref="ISnmpMessage"/>. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> /// <param name="manager">Manager</param> /// <param name="socket">The socket.</param> public static void Send(this ISnmpMessage message, EndPoint manager, Socket socket) { if (message == null) { throw new ArgumentNullException("message"); } if (socket == null) { throw new ArgumentNullException("socket"); } if (manager == null) { throw new ArgumentNullException("manager"); } var code = message.TypeCode(); if ((code != SnmpType.TrapV1Pdu && code != SnmpType.TrapV2Pdu) && code != SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format( CultureInfo.InvariantCulture, "not a trap message: {0}", code)); } var bytes = message.ToBytes(); socket.SendTo(bytes, 0, bytes.Length, SocketFlags.None, manager); } /// <summary> /// Sends an <see cref="ISnmpMessage"/>. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> /// <param name="manager">Manager</param> public static void SendAsync(this ISnmpMessage message, EndPoint manager) { if (message == null) { throw new ArgumentNullException("message"); } if (manager == null) { throw new ArgumentNullException("manager"); } var code = message.TypeCode(); if ((code != SnmpType.TrapV1Pdu && code != SnmpType.TrapV2Pdu) && code != SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format( CultureInfo.InvariantCulture, "not a trap message: {0}", code)); } using (var socket = manager.GetSocket()) { message.SendAsync(manager, socket); } } /// <summary> /// Sends an <see cref="ISnmpMessage"/>. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> /// <param name="manager">Manager</param> /// <param name="socket">The socket.</param> public static void SendAsync(this ISnmpMessage message, EndPoint manager, Socket socket) { if (message == null) { throw new ArgumentNullException("message"); } if (socket == null) { throw new ArgumentNullException("socket"); } if (manager == null) { throw new ArgumentNullException("manager"); } var code = message.TypeCode(); if ((code != SnmpType.TrapV1Pdu && code != SnmpType.TrapV2Pdu) && code != SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format( CultureInfo.InvariantCulture, "not a trap message: {0}", code)); } var bytes = message.ToBytes(); socket.BeginSendTo(bytes, 0, bytes.Length, SocketFlags.None, manager, ar => socket.EndSendTo(ar), null); } /// <summary> /// Sends this <see cref="ISnmpMessage"/> and handles the response from agent. 
/// </summary> /// <param name="request">The <see cref="ISnmpMessage"/>.</param> /// <param name="timeout">The time-out value, in milliseconds. The default value is 0, which indicates an infinite time-out period. Specifying -1 also indicates an infinite time-out period.</param> /// <param name="receiver">Port number.</param> /// <param name="registry">User registry.</param> /// <returns></returns> public static ISnmpMessage GetResponse(this ISnmpMessage request, int timeout, IPEndPoint receiver, UserRegistry registry) { // TODO: make more usage of UserRegistry. if (request == null) { throw new ArgumentNullException("request"); } if (receiver == null) { throw new ArgumentNullException("receiver"); } var code = request.TypeCode(); if (code == SnmpType.TrapV1Pdu || code == SnmpType.TrapV2Pdu || code == SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "not a request message: {0}", code)); } using (var socket = receiver.GetSocket()) { return request.GetResponse(timeout, receiver, registry, socket); } } /// <summary> /// Sends this <see cref="ISnmpMessage"/> and handles the response from agent. /// </summary> /// <param name="request">The <see cref="ISnmpMessage"/>.</param> /// <param name="timeout">The time-out value, in milliseconds. The default value is 0, which indicates an infinite time-out period. Specifying -1 also indicates an infinite time-out period.</param> /// <param name="receiver">Port number.</param> /// <returns></returns> public static ISnmpMessage GetResponse(this ISnmpMessage request, int timeout, IPEndPoint receiver) { if (request == null) { throw new ArgumentNullException("request"); } if (receiver == null) { throw new ArgumentNullException("receiver"); } var code = request.TypeCode(); if (code == SnmpType.TrapV1Pdu || code == SnmpType.TrapV2Pdu || code == SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "not a request message: {0}", code)); } using (var socket = receiver.GetSocket()) { return request.GetResponse(timeout, receiver, socket); } } /// <summary> /// Sends this <see cref="ISnmpMessage"/> and handles the response from agent. /// </summary> /// <param name="request">The <see cref="ISnmpMessage"/>.</param> /// <param name="timeout">The time-out value, in milliseconds. The default value is 0, which indicates an infinite time-out period. Specifying -1 also indicates an infinite time-out period.</param> /// <param name="receiver">Agent.</param> /// <param name="udpSocket">The UDP <see cref="Socket"/> to use to send/receive.</param> /// <returns></returns> public static ISnmpMessage GetResponse(this ISnmpMessage request, int timeout, IPEndPoint receiver, Socket udpSocket) { if (request == null) { throw new ArgumentNullException("request"); } if (receiver == null) { throw new ArgumentNullException("receiver"); } if (udpSocket == null) { throw new ArgumentNullException("udpSocket"); } var registry = new UserRegistry(); if (request.Version == VersionCode.V3) { registry.Add(request.Parameters.UserName, request.Privacy); } return request.GetResponse(timeout, receiver, registry, udpSocket); } /// <summary> /// Sends an <see cref="ISnmpMessage"/> and handles the response from agent. /// </summary> /// <param name="request">The <see cref="ISnmpMessage"/>.</param> /// <param name="timeout">The time-out value, in milliseconds. The default value is 0, which indicates an infinite time-out period. 
Specifying -1 also indicates an infinite time-out period.</param> /// <param name="receiver">Agent.</param> /// <param name="udpSocket">The UDP <see cref="Socket"/> to use to send/receive.</param> /// <param name="registry">The user registry.</param> /// <returns></returns> public static ISnmpMessage GetResponse(this ISnmpMessage request, int timeout, IPEndPoint receiver, UserRegistry registry, Socket udpSocket) { if (request == null) { throw new ArgumentNullException("request"); } if (udpSocket == null) { throw new ArgumentNullException("udpSocket"); } if (receiver == null) { throw new ArgumentNullException("receiver"); } if (registry == null) { throw new ArgumentNullException("registry"); } var requestCode = request.TypeCode(); if (requestCode == SnmpType.TrapV1Pdu || requestCode == SnmpType.TrapV2Pdu || requestCode == SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "not a request message: {0}", requestCode)); } var bytes = request.ToBytes(); #if CF int bufSize = 8192; #else var bufSize = udpSocket.ReceiveBufferSize; #endif var reply = new byte[bufSize]; // Whatever you change, try to keep the Send and the Receive close to each other. udpSocket.SendTo(bytes, receiver); #if !CF udpSocket.ReceiveTimeout = timeout; #endif int count; try { count = udpSocket.Receive(reply, 0, bufSize, SocketFlags.None); } catch (SocketException ex) { // FIXME: If you use a Mono build without the fix for this issue (https://bugzilla.novell.com/show_bug.cgi?id=599488), please uncomment this code. /* if (SnmpMessageExtension.IsRunningOnMono && ex.ErrorCode == 10035) { throw TimeoutException.Create(receiver.Address, timeout); } // */ if (ex.ErrorCode == WSAETIMEDOUT) { throw TimeoutException.Create(receiver.Address, timeout); } throw; } // Passing 'count' is not necessary because ParseMessages should ignore it, but it offer extra safety (and would avoid an issue if parsing >1 response). var response = MessageFactory.ParseMessages(reply, 0, count, registry)[0]; var responseCode = response.TypeCode(); if (responseCode == SnmpType.ResponsePdu || responseCode == SnmpType.ReportPdu) { var requestId = request.MessageId(); var responseId = response.MessageId(); if (responseId != requestId) { throw OperationException.Create(string.Format(CultureInfo.InvariantCulture, "wrong response sequence: expected {0}, received {1}", requestId, responseId), receiver.Address); } return response; } throw OperationException.Create(string.Format(CultureInfo.InvariantCulture, "wrong response type: {0}", responseCode), receiver.Address); } /// <summary> /// Sends an <see cref="ISnmpMessage"/> and handles the response from agent. 
/// </summary> /// <param name="request">The <see cref="ISnmpMessage"/>.</param> /// <param name="receiver">Agent.</param> /// <param name="udpSocket">The UDP <see cref="Socket"/> to use to send/receive.</param> /// <param name="registry">The user registry.</param> /// <returns></returns> public static async Task<ISnmpMessage> GetResponseAsync(this ISnmpMessage request, IPEndPoint receiver, UserRegistry registry, Socket udpSocket) { if (request == null) { throw new ArgumentNullException("request"); } if (udpSocket == null) { throw new ArgumentNullException("udpSocket"); } if (receiver == null) { throw new ArgumentNullException("receiver"); } if (registry == null) { throw new ArgumentNullException("registry"); } var requestCode = request.TypeCode(); if (requestCode == SnmpType.TrapV1Pdu || requestCode == SnmpType.TrapV2Pdu || requestCode == SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "not a request message: {0}", requestCode)); } var bytes = request.ToBytes(); #if CF int bufSize = 8192; #else var bufSize = udpSocket.ReceiveBufferSize; #endif var reply = new byte[bufSize]; // Whatever you change, try to keep the Send and the Receive close to each other. udpSocket.SendTo(bytes, receiver); int count; try { // IMPORTANT: follow http://blogs.msdn.com/b/pfxteam/archive/2011/12/15/10248293.aspx var args = new SocketAsyncEventArgs(); args.SetBuffer(reply, 0, bufSize); var awaitable = new SocketAwaitable(args); count = await SocketExtensions.ReceiveAsync(udpSocket, awaitable); } catch (SocketException ex) { // FIXME: If you use a Mono build without the fix for this issue (https://bugzilla.novell.com/show_bug.cgi?id=599488), please uncomment this code. /* if (SnmpMessageExtension.IsRunningOnMono && ex.ErrorCode == 10035) { throw TimeoutException.Create(receiver.Address, timeout); } // */ if (ex.ErrorCode == WSAETIMEDOUT) { throw TimeoutException.Create(receiver.Address, 0); } throw; } // Passing 'count' is not necessary because ParseMessages should ignore it, but it offer extra safety (and would avoid an issue if parsing >1 response). var response = MessageFactory.ParseMessages(reply, 0, count, registry)[0]; var responseCode = response.TypeCode(); if (responseCode == SnmpType.ResponsePdu || responseCode == SnmpType.ReportPdu) { var requestId = request.MessageId(); var responseId = response.MessageId(); if (responseId != requestId) { throw OperationException.Create(string.Format(CultureInfo.InvariantCulture, "wrong response sequence: expected {0}, received {1}", requestId, responseId), receiver.Address); } return response; } throw OperationException.Create(string.Format(CultureInfo.InvariantCulture, "wrong response type: {0}", responseCode), receiver.Address); } /// <summary> /// Ends a pending asynchronous read. 
/// </summary> /// <param name="request">The <see cref="ISnmpMessage"/>.</param> /// <param name="asyncResult">An <see cref="IAsyncResult"/> that stores state information and any user defined data for this asynchronous operation.</param> /// <returns></returns> [Obsolete("Please use GetResponseAsync and await on it.")] public static ISnmpMessage EndGetResponse(this ISnmpMessage request, IAsyncResult asyncResult) { if (asyncResult == null) { throw new ArgumentNullException("asyncResult"); } if (request == null) { throw new ArgumentNullException("request"); } var ar = (SnmpMessageAsyncResult)asyncResult; var s = ar.WorkSocket; var count = s.EndReceive(ar.Inner); // Passing 'count' is not necessary because ParseMessages should ignore it, but it offer extra safety (and would avoid an issue if parsing >1 response). var response = MessageFactory.ParseMessages(ar.GetBuffer(), 0, count, ar.Users)[0]; var responseCode = response.TypeCode(); if (responseCode == SnmpType.ResponsePdu || responseCode == SnmpType.ReportPdu) { var requestId = request.MessageId(); var responseId = response.MessageId(); if (responseId != requestId) { throw OperationException.Create(string.Format(CultureInfo.InvariantCulture, "wrong response sequence: expected {0}, received {1}", requestId, responseId), ar.Receiver.Address); } return response; } throw OperationException.Create(string.Format(CultureInfo.InvariantCulture, "wrong response type: {0}", responseCode), ar.Receiver.Address); } /// <summary> /// Begins to asynchronously send an <see cref="ISnmpMessage"/> to an <see cref="IPEndPoint"/>. /// </summary> /// <param name="request">The <see cref="ISnmpMessage"/>.</param> /// <param name="receiver">Agent.</param> /// <param name="registry">The user registry.</param> /// <param name="udpSocket">The UDP <see cref="Socket"/> to use to send/receive.</param> /// <param name="callback">The callback.</param> /// <param name="state">The state object.</param> /// <returns></returns> [Obsolete("Please use GetResponseAsync and await on it.")] public static IAsyncResult BeginGetResponse(this ISnmpMessage request, IPEndPoint receiver, UserRegistry registry, Socket udpSocket, AsyncCallback callback, object state) { if (request == null) { throw new ArgumentNullException("request"); } if (udpSocket == null) { throw new ArgumentNullException("udpSocket"); } if (receiver == null) { throw new ArgumentNullException("receiver"); } if (registry == null) { throw new ArgumentNullException("registry"); } var requestCode = request.TypeCode(); if (requestCode == SnmpType.TrapV1Pdu || requestCode == SnmpType.TrapV2Pdu || requestCode == SnmpType.ReportPdu) { throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "not a request message: {0}", requestCode)); } // Whatever you change, try to keep the Send and the Receive close to each other. udpSocket.SendTo(request.ToBytes(), receiver); #if CF var bufferSize = 8192; #else var bufferSize = udpSocket.ReceiveBufferSize; #endif var buffer = new byte[bufferSize]; // http://sharpsnmplib.codeplex.com/workitem/7234 if (callback != null) { AsyncCallback wrapped = callback; callback = asyncResult => { var result = new SnmpMessageAsyncResult(asyncResult, udpSocket, registry, receiver, buffer); wrapped(result); }; } var ar = udpSocket.BeginReceive(buffer, 0, bufferSize, SocketFlags.None, callback, state); return new SnmpMessageAsyncResult(ar, udpSocket, registry, receiver, buffer); } /// <summary> /// Tests if running on Mono. 
/// </summary> /// <returns></returns> public static bool IsRunningOnMono { get { return Type.GetType("Mono.Runtime") != null; } } /// <summary> /// Packs up the <see cref="ISnmpMessage"/>. /// </summary> /// <param name="message">The <see cref="ISnmpMessage"/>.</param> /// <param name="length">The length bytes.</param> /// <returns></returns> internal static Sequence PackMessage(this ISnmpMessage message, byte[] length) { if (message == null) { throw new ArgumentNullException("message"); } return ByteTool.PackMessage( length, message.Version, message.Header, message.Parameters, message.Privacy.GetScopeData(message.Header, message.Parameters, message.Scope.GetData(message.Version))); } /// <summary> /// http://msdn.microsoft.com/en-us/library/ms740668(VS.85).aspx /// </summary> [SuppressMessage("StyleCop.CSharp.DocumentationRules", "SA1650:ElementDocumentationMustBeSpelledCorrectly", Justification = "Reviewed. Suppression is OK here.")] private const int WSAETIMEDOUT = 10060; private sealed class SnmpMessageAsyncResult : IAsyncResult { private readonly byte[] _buffer; public SnmpMessageAsyncResult(IAsyncResult inner, Socket socket, UserRegistry users, IPEndPoint receiver, byte[] buffer) { _buffer = buffer; WorkSocket = socket; Users = users; Receiver = receiver; Inner = inner; } public IAsyncResult Inner { get; private set; } public Socket WorkSocket { get; private set; } public UserRegistry Users { get; private set; } public byte[] GetBuffer() { return _buffer; } public IPEndPoint Receiver { get; private set; } public bool IsCompleted { get { return Inner.IsCompleted; } } public WaitHandle AsyncWaitHandle { get { return Inner.AsyncWaitHandle; } } public object AsyncState { get { return Inner.AsyncState; } } public bool CompletedSynchronously { get { return Inner.CompletedSynchronously; } } } internal static class SocketExtensions { public static SocketAwaitable ReceiveAsync(Socket socket, SocketAwaitable awaitable) { awaitable.Reset(); if (!socket.ReceiveAsync(awaitable.m_eventArgs)) awaitable.m_wasCompleted = true; return awaitable; } } internal sealed class SocketAwaitable : INotifyCompletion { private readonly static Action SENTINEL = () => { }; internal bool m_wasCompleted; internal Action m_continuation; internal SocketAsyncEventArgs m_eventArgs; public SocketAwaitable(SocketAsyncEventArgs eventArgs) { if (eventArgs == null) throw new ArgumentNullException("eventArgs"); m_eventArgs = eventArgs; eventArgs.Completed += delegate { var prev = m_continuation ?? Interlocked.CompareExchange( ref m_continuation, SENTINEL, null); if (prev != null) prev(); }; } internal void Reset() { m_wasCompleted = false; m_continuation = null; } public SocketAwaitable GetAwaiter() { return this; } public bool IsCompleted { get { return m_wasCompleted; } } public void OnCompleted(Action continuation) { if (m_continuation == SENTINEL || Interlocked.CompareExchange( ref m_continuation, continuation, null) == SENTINEL) { Task.Run(continuation); } } public int GetResult() { if (m_eventArgs.SocketError != SocketError.Success) throw new SocketException((int)m_eventArgs.SocketError); return m_eventArgs.BytesTransferred; } } } }
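//---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original file): a typical
// synchronous GET that goes through the GetResponse extension defined above.
// The agent endpoint, community string and OID are placeholder values.
//---------------------------------------------------------------------------
using System.Collections.Generic;
using System.Net;
using Lextm.SharpSnmpLib;
using Lextm.SharpSnmpLib.Messaging;

internal static class GetResponseUsageSketch
{
    internal static ISnmpMessage GetSysDescr()
    {
        IPEndPoint agent = new IPEndPoint(IPAddress.Loopback, 161);

        GetRequestMessage request = new GetRequestMessage(
            0,                                  // request id
            VersionCode.V2,
            new OctetString("public"),
            new List<Variable>
            {
                new Variable(new ObjectIdentifier("1.3.6.1.2.1.1.1.0"))
            });

        // GetResponse(timeout, receiver) opens a socket, sends the request and
        // waits up to the given number of milliseconds for the reply, throwing
        // a timeout exception if the agent does not answer in time.
        return request.GetResponse(2000, agent);
    }
}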
// // Copyright (c) 2008-2011, Kenneth Bell // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. // namespace DiscUtils.Partitions { using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.IO; /// <summary> /// Represents a GUID Partition Table. /// </summary> public sealed class GuidPartitionTable : PartitionTable { private Stream _diskData; private Geometry _diskGeometry; private GptHeader _primaryHeader; private GptHeader _secondaryHeader; private byte[] _entryBuffer; /// <summary> /// Initializes a new instance of the GuidPartitionTable class. /// </summary> /// <param name="disk">The disk containing the partition table</param> public GuidPartitionTable(VirtualDisk disk) { Init(disk.Content, disk.Geometry); } /// <summary> /// Initializes a new instance of the GuidPartitionTable class. /// </summary> /// <param name="disk">The stream containing the disk data</param> /// <param name="diskGeometry">The geometry of the disk</param> public GuidPartitionTable(Stream disk, Geometry diskGeometry) { Init(disk, diskGeometry); } /// <summary> /// Gets the first sector of the disk available to hold partitions. /// </summary> public long FirstUsableSector { get { return _primaryHeader.FirstUsable; } } /// <summary> /// Gets the last sector of the disk available to hold partitions. /// </summary> public long LastUsableSector { get { return _primaryHeader.LastUsable; } } /// <summary> /// Gets the unique GPT identifier for this disk. /// </summary> public override Guid DiskGuid { get { return _primaryHeader.DiskGuid; } } /// <summary> /// Gets a collection of the partitions for storing Operating System file-systems. /// </summary> public override ReadOnlyCollection<PartitionInfo> Partitions { get { return new ReadOnlyCollection<PartitionInfo>(Utilities.Map<GptEntry, GuidPartitionInfo>(GetAllEntries(), (e) => new GuidPartitionInfo(this, e))); } } /// <summary> /// Creates a new partition table on a disk. /// </summary> /// <param name="disk">The disk to initialize.</param> /// <returns>An object to access the newly created partition table</returns> public static GuidPartitionTable Initialize(VirtualDisk disk) { return Initialize(disk.Content, disk.Geometry); } /// <summary> /// Creates a new partition table on a disk. 
/// </summary> /// <param name="disk">The stream containing the disk data</param> /// <param name="diskGeometry">The geometry of the disk</param> /// <returns>An object to access the newly created partition table</returns> public static GuidPartitionTable Initialize(Stream disk, Geometry diskGeometry) { // Create the protective MBR partition record. BiosPartitionTable pt = BiosPartitionTable.Initialize(disk, diskGeometry); pt.CreatePrimaryByCylinder(0, diskGeometry.Cylinders - 1, BiosPartitionTypes.GptProtective, false); // Create the GPT headers, and blank-out the entry areas const int EntryCount = 128; const int EntrySize = 128; int entrySectors = ((EntryCount * EntrySize) + diskGeometry.BytesPerSector - 1) / diskGeometry.BytesPerSector; byte[] entriesBuffer = new byte[EntryCount * EntrySize]; // Prepare primary header GptHeader header = new GptHeader(diskGeometry.BytesPerSector); header.HeaderLba = 1; header.AlternateHeaderLba = (disk.Length / diskGeometry.BytesPerSector) - 1; header.FirstUsable = header.HeaderLba + entrySectors + 1; header.LastUsable = header.AlternateHeaderLba - entrySectors - 1; header.DiskGuid = Guid.NewGuid(); header.PartitionEntriesLba = 2; header.PartitionEntryCount = EntryCount; header.PartitionEntrySize = EntrySize; header.EntriesCrc = CalcEntriesCrc(entriesBuffer); // Write the primary header byte[] headerBuffer = new byte[diskGeometry.BytesPerSector]; header.WriteTo(headerBuffer, 0); disk.Position = header.HeaderLba * diskGeometry.BytesPerSector; disk.Write(headerBuffer, 0, headerBuffer.Length); // Calc alternate header header.HeaderLba = header.AlternateHeaderLba; header.AlternateHeaderLba = 1; header.PartitionEntriesLba = header.HeaderLba - entrySectors; // Write the alternate header header.WriteTo(headerBuffer, 0); disk.Position = header.HeaderLba * diskGeometry.BytesPerSector; disk.Write(headerBuffer, 0, headerBuffer.Length); return new GuidPartitionTable(disk, diskGeometry); } /// <summary> /// Creates a new partition table on a disk containing a single partition. /// </summary> /// <param name="disk">The disk to initialize.</param> /// <param name="type">The partition type for the single partition</param> /// <returns>An object to access the newly created partition table</returns> public static GuidPartitionTable Initialize(VirtualDisk disk, WellKnownPartitionType type) { GuidPartitionTable pt = Initialize(disk); pt.Create(type, true); return pt; } /// <summary> /// Creates a new partition that encompasses the entire disk. /// </summary> /// <param name="type">The partition type</param> /// <param name="active">Whether the partition is active (bootable)</param> /// <returns>The index of the partition</returns> /// <remarks>The partition table must be empty before this method is called, /// otherwise IOException is thrown.</remarks> public override int Create(WellKnownPartitionType type, bool active) { List<GptEntry> allEntries = new List<GptEntry>(GetAllEntries()); EstablishReservedPartition(allEntries); // Fill the rest of the disk with the requested partition long start = FirstAvailableSector(allEntries); long end = FindLastFreeSector(start, allEntries); return Create(start, end, GuidPartitionTypes.Convert(type), 0, "Data Partition"); } /// <summary> /// Creates a new primary partition with a target size. 
/// </summary> /// <param name="size">The target size (in bytes)</param> /// <param name="type">The partition type</param> /// <param name="active">Whether the partition is active (bootable)</param> /// <returns>The index of the new partition</returns> public override int Create(long size, WellKnownPartitionType type, bool active) { if (size < _diskGeometry.BytesPerSector) { throw new ArgumentOutOfRangeException("size", size, "size must be at least one sector"); } long sectorLength = size / _diskGeometry.BytesPerSector; long start = FindGap(size / _diskGeometry.BytesPerSector, 1); return Create(start, start + sectorLength - 1, GuidPartitionTypes.Convert(type), 0, "Data Partition"); } /// <summary> /// Creates a new aligned partition that encompasses the entire disk. /// </summary> /// <param name="type">The partition type</param> /// <param name="active">Whether the partition is active (bootable)</param> /// <param name="alignment">The alignment (in bytes)</param> /// <returns>The index of the partition</returns> /// <remarks>The partition table must be empty before this method is called, /// otherwise IOException is thrown.</remarks> /// <remarks> /// Traditionally partitions were aligned to the physical structure of the underlying disk, /// however with modern storage greater efficiency is acheived by aligning partitions on /// large values that are a power of two. /// </remarks> public override int CreateAligned(WellKnownPartitionType type, bool active, int alignment) { if (alignment % _diskGeometry.BytesPerSector != 0) { throw new ArgumentException("Alignment is not a multiple of the sector size"); } List<GptEntry> allEntries = new List<GptEntry>(GetAllEntries()); EstablishReservedPartition(allEntries); // Fill the rest of the disk with the requested partition long start = Utilities.RoundUp(FirstAvailableSector(allEntries), alignment / _diskGeometry.BytesPerSector); long end = Utilities.RoundDown(FindLastFreeSector(start, allEntries) + 1, alignment / _diskGeometry.BytesPerSector); if (end <= start) { throw new IOException("No available space"); } return Create(start, end - 1, GuidPartitionTypes.Convert(type), 0, "Data Partition"); } /// <summary> /// Creates a new aligned partition with a target size. /// </summary> /// <param name="size">The target size (in bytes)</param> /// <param name="type">The partition type</param> /// <param name="active">Whether the partition is active (bootable)</param> /// <param name="alignment">The alignment (in bytes)</param> /// <returns>The index of the new partition</returns> /// <remarks> /// Traditionally partitions were aligned to the physical structure of the underlying disk, /// however with modern storage greater efficiency is acheived by aligning partitions on /// large values that are a power of two. 
/// </remarks> public override int CreateAligned(long size, WellKnownPartitionType type, bool active, int alignment) { if (size < _diskGeometry.BytesPerSector) { throw new ArgumentOutOfRangeException("size", size, "size must be at least one sector"); } if (alignment % _diskGeometry.BytesPerSector != 0) { throw new ArgumentException("Alignment is not a multiple of the sector size"); } if (size % alignment != 0) { throw new ArgumentException("Size is not a multiple of the alignment"); } long sectorLength = size / _diskGeometry.BytesPerSector; long start = FindGap(size / _diskGeometry.BytesPerSector, alignment / _diskGeometry.BytesPerSector); return Create(start, start + sectorLength - 1, GuidPartitionTypes.Convert(type), 0, "Data Partition"); } /// <summary> /// Creates a new GUID partition on the disk. /// </summary> /// <param name="startSector">The first sector of the partition.</param> /// <param name="endSector">The last sector of the partition.</param> /// <param name="type">The partition type</param> /// <param name="attributes">The partition attributes</param> /// <param name="name">The name of the partition</param> /// <returns>The index of the new partition</returns> /// <remarks>No checking is performed on the parameters, the caller is /// responsible for ensuring that the partition does not overlap other partitions.</remarks> public int Create(long startSector, long endSector, Guid type, long attributes, string name) { GptEntry newEntry = CreateEntry(startSector, endSector, type, attributes, name); return GetEntryIndex(newEntry.Identity); } /// <summary> /// Deletes a partition at a given index. /// </summary> /// <param name="index">The index of the partition</param> public override void Delete(int index) { int offset = GetPartitionOffset(index); Array.Clear(_entryBuffer, offset, _primaryHeader.PartitionEntrySize); Write(); } internal SparseStream Open(GptEntry entry) { long start = entry.FirstUsedLogicalBlock * _diskGeometry.BytesPerSector; long end = (entry.LastUsedLogicalBlock + 1) * _diskGeometry.BytesPerSector; return new SubStream(_diskData, start, end - start); } private static uint CalcEntriesCrc(byte[] buffer) { return Crc32LittleEndian.Compute(Crc32Algorithm.Common, buffer, 0, buffer.Length); } private static int CountEntries<T>(ICollection<T> values, Func<T, bool> pred) { int count = 0; foreach (var val in values) { if (pred(val)) { ++count; } } return count; } private void Init(Stream disk, Geometry diskGeometry) { BiosPartitionTable bpt; try { bpt = new BiosPartitionTable(disk, diskGeometry); } catch (IOException ioe) { throw new IOException("Invalid GPT disk, protective MBR table not present or invalid", ioe); } if (bpt.Count != 1 || bpt[0].BiosType != BiosPartitionTypes.GptProtective) { throw new IOException("Invalid GPT disk, protective MBR table is not valid"); } _diskData = disk; _diskGeometry = diskGeometry; disk.Position = diskGeometry.BytesPerSector; byte[] sector = Utilities.ReadFully(disk, diskGeometry.BytesPerSector); _primaryHeader = new GptHeader(diskGeometry.BytesPerSector); if (!_primaryHeader.ReadFrom(sector, 0) || !ReadEntries(_primaryHeader)) { disk.Position = disk.Length - diskGeometry.BytesPerSector; disk.Read(sector, 0, sector.Length); _secondaryHeader = new GptHeader(diskGeometry.BytesPerSector); if (!_secondaryHeader.ReadFrom(sector, 0) || !ReadEntries(_secondaryHeader)) { throw new IOException("No valid GUID Partition Table found"); } // Generate the primary table from the secondary one _primaryHeader = new GptHeader(_secondaryHeader);
_primaryHeader.HeaderLba = _secondaryHeader.AlternateHeaderLba; _primaryHeader.AlternateHeaderLba = _secondaryHeader.HeaderLba; _primaryHeader.PartitionEntriesLba = 2; // If the disk is writeable, fix up the primary partition table based on the // (valid) secondary table. if (disk.CanWrite) { WritePrimaryHeader(); } } if (_secondaryHeader == null) { _secondaryHeader = new GptHeader(diskGeometry.BytesPerSector); disk.Position = disk.Length - diskGeometry.BytesPerSector; disk.Read(sector, 0, sector.Length); if (!_secondaryHeader.ReadFrom(sector, 0) || !ReadEntries(_secondaryHeader)) { // Generate the secondary table from the primary one _secondaryHeader = new GptHeader(_primaryHeader); _secondaryHeader.HeaderLba = _primaryHeader.AlternateHeaderLba; _secondaryHeader.AlternateHeaderLba = _primaryHeader.HeaderLba; _secondaryHeader.PartitionEntriesLba = _secondaryHeader.HeaderLba - (Utilities.RoundUp(_secondaryHeader.PartitionEntryCount * _secondaryHeader.PartitionEntrySize, diskGeometry.BytesPerSector) / diskGeometry.BytesPerSector); // If the disk is writeable, fix up the secondary partition table based on the // (valid) primary table. if (disk.CanWrite) { WriteSecondaryHeader(); } } } } private void EstablishReservedPartition(List<GptEntry> allEntries) { // If no MicrosoftReserved partition, and no Microsoft Data partitions, and the disk // has a 'reasonable' size free, create a Microsoft Reserved partition. if (CountEntries(allEntries, e => e.PartitionType == GuidPartitionTypes.MicrosoftReserved) == 0 && CountEntries(allEntries, e => e.PartitionType == GuidPartitionTypes.WindowsBasicData) == 0 && _diskGeometry.Capacity > 512 * 1024 * 1024) { long reservedStart = FirstAvailableSector(allEntries); long reservedEnd = FindLastFreeSector(reservedStart, allEntries); if ((reservedEnd - reservedStart + 1) * _diskGeometry.BytesPerSector > 512 * 1024 * 1024) { long size = ((_diskGeometry.Capacity < (16 * 1024L * 1024 * 1024)) ?
32 : 128) * 1024 * 1024; reservedEnd = reservedStart + (size / _diskGeometry.BytesPerSector) - 1; int reservedOffset = GetFreeEntryOffset(); GptEntry newReservedEntry = new GptEntry(); newReservedEntry.PartitionType = GuidPartitionTypes.MicrosoftReserved; newReservedEntry.Identity = Guid.NewGuid(); newReservedEntry.FirstUsedLogicalBlock = reservedStart; newReservedEntry.LastUsedLogicalBlock = reservedEnd; newReservedEntry.Attributes = 0; newReservedEntry.Name = "Microsoft reserved partition"; newReservedEntry.WriteTo(_entryBuffer, reservedOffset); allEntries.Add(newReservedEntry); } } } private GptEntry CreateEntry(long startSector, long endSector, Guid type, long attributes, string name) { if (endSector < startSector) { throw new ArgumentException("The end sector is before the start sector"); } int offset = GetFreeEntryOffset(); GptEntry newEntry = new GptEntry(); newEntry.PartitionType = type; newEntry.Identity = Guid.NewGuid(); newEntry.FirstUsedLogicalBlock = startSector; newEntry.LastUsedLogicalBlock = endSector; newEntry.Attributes = (ulong)attributes; newEntry.Name = name; newEntry.WriteTo(_entryBuffer, offset); // Commit changes to disk Write(); return newEntry; } private long FindGap(long numSectors, long alignmentSectors) { List<GptEntry> list = new List<GptEntry>(GetAllEntries()); list.Sort(); long startSector = Utilities.RoundUp(_primaryHeader.FirstUsable, alignmentSectors); foreach (var entry in list) { if (!Utilities.RangesOverlap(startSector, startSector + numSectors - 1, entry.FirstUsedLogicalBlock, entry.LastUsedLogicalBlock)) { break; } else { startSector = Utilities.RoundUp(entry.LastUsedLogicalBlock + 1, alignmentSectors); } } if (_diskGeometry.TotalSectors - startSector < numSectors) { throw new IOException(string.Format(CultureInfo.InvariantCulture, "Unable to find free space of {0} sectors", numSectors)); } return startSector; } private long FirstAvailableSector(List<GptEntry> allEntries) { long start = _primaryHeader.FirstUsable; foreach (GptEntry entry in allEntries) { if (entry.LastUsedLogicalBlock >= start) { start = entry.LastUsedLogicalBlock + 1; } } return start; } private long FindLastFreeSector(long start, List<GptEntry> allEntries) { long end = _primaryHeader.LastUsable; foreach (GptEntry entry in allEntries) { if (entry.LastUsedLogicalBlock > start && entry.FirstUsedLogicalBlock <= end) { end = entry.FirstUsedLogicalBlock - 1; } } return end; } private void Write() { WritePrimaryHeader(); WriteSecondaryHeader(); } private void WritePrimaryHeader() { byte[] buffer = new byte[_diskGeometry.BytesPerSector]; _primaryHeader.EntriesCrc = CalcEntriesCrc(); _primaryHeader.WriteTo(buffer, 0); _diskData.Position = _diskGeometry.BytesPerSector; _diskData.Write(buffer, 0, buffer.Length); _diskData.Position = 2 * _diskGeometry.BytesPerSector; _diskData.Write(_entryBuffer, 0, _entryBuffer.Length); } private void WriteSecondaryHeader() { byte[] buffer = new byte[_diskGeometry.BytesPerSector]; _secondaryHeader.EntriesCrc = CalcEntriesCrc(); _secondaryHeader.WriteTo(buffer, 0); _diskData.Position = _diskData.Length - _diskGeometry.BytesPerSector; _diskData.Write(buffer, 0, buffer.Length); _diskData.Position = _secondaryHeader.PartitionEntriesLba * _diskGeometry.BytesPerSector; _diskData.Write(_entryBuffer, 0, _entryBuffer.Length); } private bool ReadEntries(GptHeader header) { _diskData.Position = header.PartitionEntriesLba * _diskGeometry.BytesPerSector; _entryBuffer = Utilities.ReadFully(_diskData, (int)(header.PartitionEntrySize * header.PartitionEntryCount)); if 
(header.EntriesCrc != CalcEntriesCrc()) { return false; } return true; } private uint CalcEntriesCrc() { return Crc32LittleEndian.Compute(Crc32Algorithm.Common, _entryBuffer, 0, _entryBuffer.Length); } private IEnumerable<GptEntry> GetAllEntries() { for (int i = 0; i < _primaryHeader.PartitionEntryCount; ++i) { GptEntry entry = new GptEntry(); entry.ReadFrom(_entryBuffer, i * _primaryHeader.PartitionEntrySize); if (entry.PartitionType != Guid.Empty) { yield return entry; } } } private int GetPartitionOffset(int index) { bool found = false; int entriesSoFar = 0; int position = 0; while (!found && position < _primaryHeader.PartitionEntryCount) { GptEntry entry = new GptEntry(); entry.ReadFrom(_entryBuffer, position * _primaryHeader.PartitionEntrySize); if (entry.PartitionType != Guid.Empty) { if (index == entriesSoFar) { found = true; break; } entriesSoFar++; } position++; } if (found) { return position * _primaryHeader.PartitionEntrySize; } else { throw new IOException(string.Format(CultureInfo.InvariantCulture, "No such partition: {0}", index)); } } private int GetEntryIndex(Guid identity) { int index = 0; for (int i = 0; i < _primaryHeader.PartitionEntryCount; ++i) { GptEntry entry = new GptEntry(); entry.ReadFrom(_entryBuffer, i * _primaryHeader.PartitionEntrySize); if (entry.Identity == identity) { return index; } else if (entry.PartitionType != Guid.Empty) { index++; } } throw new IOException("No such partition"); } private int GetFreeEntryOffset() { for (int i = 0; i < _primaryHeader.PartitionEntryCount; ++i) { GptEntry entry = new GptEntry(); entry.ReadFrom(_entryBuffer, i * _primaryHeader.PartitionEntrySize); if (entry.PartitionType == Guid.Empty) { return i * _primaryHeader.PartitionEntrySize; } } throw new IOException("No free partition entries available"); } } }
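// ---------------------------------------------------------------------------
// Usage sketch (illustrative only; not part of the library file above). It
// shows one way to drive the GuidPartitionTable API defined above: initialize
// a GPT layout on a stream-backed disk, then create a single aligned data
// partition. Geometry.FromCapacity and WellKnownPartitionType.WindowsNtfs are
// assumptions made for this example and may be named differently in the real
// library; the library's namespace is assumed to be imported.
// ---------------------------------------------------------------------------
using System.IO;

public static class GuidPartitionTableUsageSketch
{
    public static void CreateSingleDataPartition()
    {
        // A 64 MiB in-memory "disk"; a real caller would use a VirtualDisk or a file stream.
        byte[] backing = new byte[64 * 1024 * 1024];
        using (MemoryStream disk = new MemoryStream(backing))
        {
            Geometry geometry = Geometry.FromCapacity(disk.Length);   // assumed helper

            // Writes the protective MBR plus the primary and backup GPT headers.
            GuidPartitionTable table = GuidPartitionTable.Initialize(disk, geometry);

            // 1 MiB alignment is a common choice for modern storage (see CreateAligned above).
            int index = table.CreateAligned(WellKnownPartitionType.WindowsNtfs, true, 1024 * 1024);

            System.Console.WriteLine("Created partition #" + index);
        }
    }
}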
// Created by Paul Gonzalez Becerra using System; using Saserdote.Mathematics; namespace Saserdote.Mathematics.Collision { public class BVRectangle:BoundingVolume { #region --- Field Variables --- // Variables private Point2f pMin; private Point2f pMax; private Point2f pPos; #endregion // Field Variables #region --- Constructors --- public BVRectangle(Point2f pmMin, Point2f pmMax) { pMin= pmMin; pMax= pmMax; pPos= pMax|pMin; } public BVRectangle(float x, float y, float w, float h):this(new Point2f(x, y), new Point2f(x+w, y+h)) {} #endregion // Constructors #region --- Properties --- // Gets and sets the top-left point of the rectangle public Point2f min { get { return pMin; } set { pMin= value; pPos= pMax|pMin; } } // Gets and sets the bottom-right point of the rectangle public Point2f max { get { return pMax; } set { pMax= value; pPos= pMax|pMin; } } // Gets the size of the rectangle public Size2f size { get { return new Size2f(pMax.x-pMin.x, pMax.y-pMin.y); } } #endregion // Properties #region --- Inherited Properties --- // Gets the points of the bounding volume public override Point3f[] points { get { return new Point3f[] { new Point3f(pMin.x, pMin.y, 0), new Point3f(pMax.x, pMin.y, 0), new Point3f(pMax.x, pMax.y, 0), new Point3f(pMin.x, pMax.y, 0) }; } } // Gets and sets the position of the bounding volume public override Point3f position { get { return (Point3f)pPos; } set { // Variables Vector2 vec= (pMax-pMin)/2f; Point2f valuex= (Point2f)value; pPos= valuex; pMin= valuex-vec; pMax= valuex+vec; } } #endregion // Inherited Properties #region --- Static Methods --- // Creates a bounding volume rectangle from the given 2d points public static BVRectangle createFromPoints(Point2f[] vertices) { if(vertices== null) throw new ArgumentNullException(); if(vertices.Length== 0) throw new ArgumentException("No vertices declared"); // Variables (start min at the largest value and max at the smallest so the loop can shrink them) Point2f pmMin= new Point2f(float.MaxValue); Point2f pmMax= new Point2f(float.MinValue); for(int i= 0; i< vertices.Length; i++) { pmMin= Mathx.min(vertices[i], pmMin); pmMax= Mathx.max(vertices[i], pmMax); } return new BVRectangle(pmMin, pmMax); } // Creates a bounding volume rectangle from the given 3d points public static BVRectangle createFromPoints(Point3f[] vertices) { if(vertices== null) throw new ArgumentNullException(); // Variables Point2f[] results= new Point2f[vertices.Length]; for(int i= 0; i< vertices.Length; i++) { results[i].x= vertices[i].x; results[i].y= vertices[i].y; } return createFromPoints(results); } #endregion // Static Methods #region --- Methods --- // Finds if the two rectangles are intersecting each other public bool intersects(ref BVRectangle collider) { if(pMax.x< collider.pMin.x || pMin.x> collider.pMax.x) return false; if(pMax.y< collider.pMin.y || pMin.y> collider.pMax.y) return false; return true; } public bool intersects(BVRectangle collider) { return intersects(ref collider); } // Finds if the rectangle and the circle collide together public bool intersects(ref BVCircle collider) { // Variables float n= 0f; Point3f[] p= points; for(int i= 0; i< p.Length; i++) { if(collider.contains(ref p[i])) return true; } if(collider.pPos.x< pMin.x) n+= (collider.pPos.x-pMin.x)*(collider.pPos.x-pMin.x); else if(collider.pPos.x> pMax.x) n+= (collider.pPos.x-pMax.x)*(collider.pPos.x-pMax.x); if(collider.pPos.y< pMin.y) n+= (collider.pPos.y-pMin.y)*(collider.pPos.y-pMin.y); else if(collider.pPos.y> pMax.y) n+= (collider.pPos.y-pMax.y)*(collider.pPos.y-pMax.y); if(n<= (collider.radius*collider.radius)) return true; return false; } public
bool intersects(BVCircle collider) { return intersects(ref collider); } // Finds if the two coord floating points are inside the rectangle public bool contains(float x, float y) { if(pMax.x< x || pMin.x> x) return false; if(pMax.y< y || pMin.y> y) return false; return true; } #endregion // Methods #region --- Inherited Methods --- // Finds if the two bounding volumes intersect each other public override bool intersects(ref BoundingVolume collider) { if(collider== null) return false; if(collider is BVRectangle) { // Variables BVRectangle rect= (BVRectangle)collider; return intersects(ref rect); } if(collider is BVCircle) { // Variables BVCircle circ= (BVCircle)collider; return intersects(ref circ); } return false; } // Finds if the given 2d vector is inside the volume public override bool contains(ref Vector2 vec) { return contains(vec.x, vec.y); } // Finds if the given 3d vector is inside the volume public override bool contains(ref Vector3 vec) { return contains(vec.x, vec.y); } // Finds if the given 2d point is inside the volume public override bool contains(ref Point2i pt) { return contains((float)pt.x, (float)pt.y); } public override bool contains(ref Point2f pt) { return contains(pt.x, pt.y); } // Finds if the given 3d point is inside the volume public override bool contains(ref Point3i pt) { return contains((float)pt.x, (float)pt.y); } public override bool contains(ref Point3f pt) { return contains(pt.x, pt.y); } // Gets the indices to make a line mesh out of it later in Saserdote public override ushort[] getRendableLineIndices() { return new ushort[] { 0, 3, 3, 2, 2, 1, 1, 0 }; } #endregion // Inherited Methods } } // End of File
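// ---------------------------------------------------------------------------
// Usage sketch (illustrative; not part of the class above): basic overlap and
// containment checks with BVRectangle. Only members visible above are used;
// the Saserdote assemblies are assumed to be referenced by the project.
// ---------------------------------------------------------------------------
using Saserdote.Mathematics.Collision;

public static class BVRectangleUsageSketch
{
    public static void Example()
    {
        // Two axis-aligned rectangles built from (x, y, width, height).
        BVRectangle a = new BVRectangle(0f, 0f, 10f, 10f);
        BVRectangle b = new BVRectangle(5f, 5f, 10f, 10f);

        bool overlap = a.intersects(b);   // true: they share the region [5,10]x[5,10]
        bool hit = a.contains(2f, 3f);    // true: the point lies inside 'a'
        bool miss = a.contains(20f, 3f);  // false: x is outside [0,10]

        System.Console.WriteLine(overlap + " " + hit + " " + miss);
    }
}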
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; using System.Linq; namespace Microsoft.CodeAnalysis.Differencing { public sealed partial class Match<TNode> { private const double ExactMatchDistance = 0.0; private const double EpsilonDistance = 0.00001; private const double MatchingDistance1 = 0.25; private const double MatchingDistance2 = 0.5; private const double MaxDistance = 1.0; private readonly TreeComparer<TNode> _comparer; private readonly TNode _root1; private readonly TNode _root2; private readonly Dictionary<TNode, TNode> _oneToTwo; private readonly Dictionary<TNode, TNode> _twoToOne; internal Match(TNode root1, TNode root2, TreeComparer<TNode> comparer, IEnumerable<KeyValuePair<TNode, TNode>> knownMatches) { _root1 = root1; _root2 = root2; _comparer = comparer; int labelCount = comparer.LabelCount; CategorizeNodesByLabels(comparer, root1, labelCount, out var nodes1, out var count1); CategorizeNodesByLabels(comparer, root2, labelCount, out var nodes2, out var count2); _oneToTwo = new Dictionary<TNode, TNode>(); _twoToOne = new Dictionary<TNode, TNode>(); // Root nodes always match. Add them before adding known matches to make sure we always have root mapping. TryAdd(root1, root2); if (knownMatches != null) { foreach (var knownMatch in knownMatches) { if (comparer.GetLabel(knownMatch.Key) != comparer.GetLabel(knownMatch.Value)) { throw new ArgumentException(string.Format(WorkspacesResources.Matching_nodes_0_and_1_must_have_the_same_label, knownMatch.Key, knownMatch.Value), nameof(knownMatches)); } if (!comparer.TreesEqual(knownMatch.Key, root1)) { throw new ArgumentException(string.Format(WorkspacesResources.Node_0_must_be_contained_in_the_old_tree, knownMatch.Key), nameof(knownMatches)); } if (!comparer.TreesEqual(knownMatch.Value, root2)) { throw new ArgumentException(string.Format(WorkspacesResources.Node_0_must_be_contained_in_the_new_tree, knownMatch.Value), nameof(knownMatches)); } // skip pairs whose key or value is already mapped: TryAdd(knownMatch.Key, knownMatch.Value); } } ComputeMatch(nodes1, nodes2); } private static void CategorizeNodesByLabels( TreeComparer<TNode> comparer, TNode root, int labelCount, out List<TNode>[] nodes, out int totalCount) { nodes = new List<TNode>[labelCount]; int count = 0; // It is important that we add the nodes in depth-first prefix order. // This order ensures that a node of a certain kind can have a parent of the same kind // and we can still use tied-to-parent for that kind. That's because the parent will always // be processed earlier than the child due to depth-first prefix ordering. 
foreach (TNode node in comparer.GetDescendants(root)) { int label = comparer.GetLabel(node); if (label < 0 || label >= labelCount) { throw new InvalidOperationException(string.Format(WorkspacesResources.Label_for_node_0_is_invalid_it_must_be_within_bracket_0_1, node, labelCount)); } var list = nodes[label]; if (list == null) { nodes[label] = list = new List<TNode>(); } list.Add(node); count++; } totalCount = count; } private void ComputeMatch(List<TNode>[] nodes1, List<TNode>[] nodes2) { Debug.Assert(nodes1.Length == nodes2.Length); // --- The original FastMatch algorithm --- // // For each leaf label l, and then for each internal node label l do: // a) S1 := chain T1(l) // b) S2 := chain T2(l) // c) lcs := LCS(S1, S2, Equal) // d) For each pair of nodes (x,y) in lcs add (x,y) to M. // e) Pair unmatched nodes with label l as in Algorithm Match, adding matches to M: // For each unmatched node x in T1, if there is an unmatched node y in T2 such that equal(x,y) // then add (x,y) to M. // // equal(x,y) is defined as follows: // x, y are leafs => equal(x,y) := label(x) == label(y) && compare(value(x), value(y)) <= f // x, y are nodes => equal(x,y) := label(x) == label(y) && |common(x,y)| / max(|x|, |y|) > t // where f, t are constants. // // --- Actual implementation --- // // We also categorize nodes by their labels, but then we proceed differently: // // 1) A label may be marked "tied to parent". Let x, y have both label l and l is "tied to parent". // Then (x,y) can be in M only if (parent(x), parent(y)) in M. // Thus we require labels of children tied to a parent to be preceded by all their possible parent labels. // // 2) Rather than defining function equal in terms of constants f and t, which are hard to get right, // we try to match multiple times with different threshold for node distance. // The comparer defines the distance [0..1] between two nodes and it can do so by analyzing // the node structure and value. The comparer can tune the distance specifically for each node kind. // We first try to match nodes of the same labels to the exactly matching or almost matching counterparts. // The we keep increasing the threshold and keep adding matches. for (int l = 0; l < nodes1.Length; l++) { if (nodes1[l] != null && nodes2[l] != null) { ComputeMatchForLabel(l, nodes1[l], nodes2[l]); } } } private void ComputeMatchForLabel(int label, List<TNode> s1, List<TNode> s2) { int tiedToAncestor = _comparer.TiedToAncestor(label); ComputeMatchForLabel(s1, s2, tiedToAncestor, EpsilonDistance); // almost exact match ComputeMatchForLabel(s1, s2, tiedToAncestor, MatchingDistance1); // ok match ComputeMatchForLabel(s1, s2, tiedToAncestor, MatchingDistance2); // ok match ComputeMatchForLabel(s1, s2, tiedToAncestor, MaxDistance); // any match } private void ComputeMatchForLabel(List<TNode> s1, List<TNode> s2, int tiedToAncestor, double maxAcceptableDistance) { // Obviously, the algorithm below is O(n^2). However, in the common case, the 2 lists will // be sequences that exactly match. The purpose of "firstNonMatch2" is to reduce the complexity // to O(n) in this case. Basically, the pointer is the 1st non-matched node in the list of nodes of tree2 // with the given label. // Whenever we match to firstNonMatch2 we set firstNonMatch2 to the subsequent node. // So in the case of totally matching sequences, we process them in O(n) - // both node1 and firstNonMatch2 will be advanced simultaneously. 
Debug.Assert(maxAcceptableDistance >= ExactMatchDistance && maxAcceptableDistance <= MaxDistance); int count1 = s1.Count; int count2 = s2.Count; int firstNonMatch2 = 0; for (int i1 = 0; i1 < count1; i1++) { TNode node1 = s1[i1]; // Skip this guy if it already has a partner if (HasPartnerInTree2(node1)) { continue; } // Find node2 that matches node1 the best, i.e. has minimal distance. double bestDistance = MaxDistance * 2; TNode bestMatch = default(TNode); bool matched = false; int i2; for (i2 = firstNonMatch2; i2 < count2; i2++) { TNode node2 = s2[i2]; // Skip this guy if it already has a partner if (HasPartnerInTree1(node2)) { continue; } // this requires parents to be processed before their children: if (tiedToAncestor > 0) { // TODO (tomat): For nodes tied to their parents, // consider avoiding matching them to all other nodes of the same label. // Rather we should only match them with their siblings that share the same parent. var ancestor1 = _comparer.GetAncestor(node1, tiedToAncestor); var ancestor2 = _comparer.GetAncestor(node2, tiedToAncestor); // Since CategorizeNodesByLabels added nodes to the s1/s2 lists in depth-first prefix order, // we can also accept equality in the following condition. That's because we find the partner // of the parent node before we get to finding it for the child node of the same kind. Debug.Assert(_comparer.GetLabel(ancestor1) <= _comparer.GetLabel(node1)); if (!Contains(ancestor1, ancestor2)) { continue; } } // We know that // 1. (node1, node2) not in M // 2. Both of their parents are matched to the same parent (or are not matched) // // Now, we have no other choice than comparing the node "values" // and looking for the one with the smaller distance. double distance = _comparer.GetDistance(node1, node2); if (distance < bestDistance) { matched = true; bestMatch = node2; bestDistance = distance; // We only stop if we've got an exact match. This is to resolve the problem // of entities with identical names(name is often used as the "value" of a // node) but with different "sub-values" (e.g. two locals may have the same name // but different types. Since the type is not part of the value, we don't want // to stop looking for the best match if we don't have an exact match). if (distance == ExactMatchDistance) { break; } } } if (matched && bestDistance <= maxAcceptableDistance) { bool added = TryAdd(node1, bestMatch); // We checked above that node1 doesn't have a partner. // The map is a bijection by construction, so we should be able to add the mapping. Debug.Assert(added); // If we exactly matched to firstNonMatch2 we can advance it. 
if (i2 == firstNonMatch2) { firstNonMatch2 = i2 + 1; } if (firstNonMatch2 == count2) { return; } } } } internal bool TryAdd(TNode node1, TNode node2) { Debug.Assert(_comparer.TreesEqual(node1, _root1)); Debug.Assert(_comparer.TreesEqual(node2, _root2)); if (_oneToTwo.ContainsKey(node1) || _twoToOne.ContainsKey(node2)) { return false; } _oneToTwo.Add(node1, node2); _twoToOne.Add(node2, node1); return true; } internal bool TryGetPartnerInTree1(TNode node2, out TNode partner1) { bool result = _twoToOne.TryGetValue(node2, out partner1); Debug.Assert(_comparer.TreesEqual(node2, _root2)); Debug.Assert(!result || _comparer.TreesEqual(partner1, _root1)); return result; } internal bool HasPartnerInTree1(TNode node2) { Debug.Assert(_comparer.TreesEqual(node2, _root2)); return _twoToOne.ContainsKey(node2); } internal bool TryGetPartnerInTree2(TNode node1, out TNode partner2) { bool result = _oneToTwo.TryGetValue(node1, out partner2); Debug.Assert(_comparer.TreesEqual(node1, _root1)); Debug.Assert(!result || _comparer.TreesEqual(partner2, _root2)); return result; } internal bool HasPartnerInTree2(TNode node1) { Debug.Assert(_comparer.TreesEqual(node1, _root1)); return _oneToTwo.ContainsKey(node1); } internal bool Contains(TNode node1, TNode node2) { Debug.Assert(_comparer.TreesEqual(node2, _root2)); return TryGetPartnerInTree2(node1, out var partner2) && node2.Equals(partner2); } public TreeComparer<TNode> Comparer => _comparer; public TNode OldRoot => _root1; public TNode NewRoot => _root2; public IReadOnlyDictionary<TNode, TNode> Matches { get { return new ReadOnlyDictionary<TNode, TNode>(_oneToTwo); } } public IReadOnlyDictionary<TNode, TNode> ReverseMatches { get { return new ReadOnlyDictionary<TNode, TNode>(_twoToOne); } } public bool TryGetNewNode(TNode oldNode, out TNode newNode) { return _oneToTwo.TryGetValue(oldNode, out newNode); } public bool TryGetOldNode(TNode newNode, out TNode oldNode) { return _twoToOne.TryGetValue(newNode, out oldNode); } /// <summary> /// Returns an edit script (a sequence of edits) that transform <see cref="OldRoot"/> subtree /// to <see cref="NewRoot"/> subtree. /// </summary> public EditScript<TNode> GetTreeEdits() { return new EditScript<TNode>(this); } /// <summary> /// Returns an edit script (a sequence of edits) that transform a sequence of nodes <paramref name="oldNodes"/> /// to a sequence of nodes <paramref name="newNodes"/>. /// </summary> /// <exception cref="ArgumentNullException"><paramref name="oldNodes"/> or <paramref name="newNodes"/> is a null reference.</exception> public IEnumerable<Edit<TNode>> GetSequenceEdits(IEnumerable<TNode> oldNodes, IEnumerable<TNode> newNodes) { if (oldNodes == null) { throw new ArgumentNullException(nameof(oldNodes)); } if (newNodes == null) { throw new ArgumentNullException(nameof(newNodes)); } var oldList = (oldNodes as IReadOnlyList<TNode>) ?? oldNodes.ToList(); var newList = (newNodes as IReadOnlyList<TNode>) ?? newNodes.ToList(); return new LongestCommonSubsequence(this).GetEdits(oldList, newList); } } }
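// ---------------------------------------------------------------------------
// Illustration (not Roslyn API): a stripped-down version of the threshold
// sweep described in ComputeMatchForLabel above. Nodes of one label are
// greedily paired with their closest unmatched counterpart, first requiring a
// near-exact match and then progressively relaxing the acceptable distance.
// The string-based distance function and the assumption that node values are
// distinct are simplifications made purely for this demo.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;

internal static class GreedyMatchSketch
{
    // Distance in [0,1]: 0 for identical strings, approaching 1 for very different ones.
    private static double Distance(string a, string b)
    {
        int common = 0;
        int max = Math.Max(a.Length, b.Length);
        for (int i = 0; i < Math.Min(a.Length, b.Length); i++)
        {
            if (a[i] == b[i]) common++;
        }
        return max == 0 ? 0.0 : 1.0 - (double)common / max;
    }

    public static Dictionary<string, string> Match(IReadOnlyList<string> s1, IReadOnlyList<string> s2)
    {
        var oneToTwo = new Dictionary<string, string>();   // assumes distinct values in s1
        var matched2 = new HashSet<string>();

        // Same spirit as EpsilonDistance / MatchingDistance1 / MatchingDistance2 / MaxDistance.
        foreach (double threshold in new[] { 0.00001, 0.25, 0.5, 1.0 })
        {
            foreach (string node1 in s1)
            {
                if (oneToTwo.ContainsKey(node1)) continue;   // already has a partner

                string best = null;
                double bestDistance = double.MaxValue;
                foreach (string node2 in s2)
                {
                    if (matched2.Contains(node2)) continue;  // already has a partner

                    double d = Distance(node1, node2);
                    if (d < bestDistance)
                    {
                        best = node2;
                        bestDistance = d;
                        if (d == 0.0) break;                 // exact match, stop searching
                    }
                }

                if (best != null && bestDistance <= threshold)
                {
                    oneToTwo.Add(node1, best);
                    matched2.Add(best);
                }
            }
        }

        return oneToTwo;
    }
}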
/* * Copyright (c) Contributors, http://aurora-sim.org/, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using Nini.Config; using OpenMetaverse; using Aurora.Framework; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using OpenSim.Region.Framework.Scenes.Serialization; namespace Aurora.Modules.Attachments { public class AttachmentsModule : IAttachmentsModule, INonSharedRegionModule { #region Declares protected IScene m_scene = null; protected bool m_allowMultipleAttachments = true; protected int m_maxNumberOfAttachments = 38; public string Name { get { return "Attachments Module"; } } public Type ReplaceableInterface { get { return null; } } public IAvatarFactory AvatarFactory = null; #endregion #region INonSharedRegionModule Methods public void Initialise(IConfigSource source) { if (source.Configs["Attachments"] != null) { m_maxNumberOfAttachments = source.Configs["Attachments"].GetInt("MaxNumberOfAttachments", m_maxNumberOfAttachments); m_allowMultipleAttachments = source.Configs["Attachments"].GetBoolean("EnableMultipleAttachments", m_allowMultipleAttachments); } } public void AddRegion (IScene scene) { m_scene = scene; m_scene.RegisterModuleInterface<IAttachmentsModule>(this); m_scene.EventManager.OnNewClient += SubscribeToClientEvents; m_scene.EventManager.OnClosingClient += UnsubscribeFromClientEvents; m_scene.EventManager.OnMakeRootAgent += ResumeAvatar; m_scene.EventManager.OnAgentFailedToLeave += ResumeAvatar; m_scene.EventManager.OnSetAgentLeaving += AgentIsLeaving; } public void RemoveRegion (IScene scene) { m_scene.UnregisterModuleInterface<IAttachmentsModule>(this); m_scene.EventManager.OnNewClient -= SubscribeToClientEvents; m_scene.EventManager.OnClosingClient -= UnsubscribeFromClientEvents; m_scene.EventManager.OnMakeRootAgent -= ResumeAvatar; m_scene.EventManager.OnAgentFailedToLeave -= ResumeAvatar; m_scene.EventManager.OnSetAgentLeaving -= AgentIsLeaving; } public void RegionLoaded (IScene scene) { 
AvatarFactory = scene.RequestModuleInterface<IAvatarFactory>(); } public void Close() { RemoveRegion(m_scene); } protected void AgentIsLeaving(IScenePresence presence, OpenSim.Services.Interfaces.GridRegion destination) { //If its a root agent, we need to save all attachments as well if(!presence.IsChildAgent) SuspendAvatar(presence, destination); } #endregion #region Suspend/Resume avatars private Dictionary<UUID, List<IScenePresence>> _usersToSendAttachmentsToWhenLoaded = new Dictionary<UUID, List<IScenePresence>>(); public void ResumeAvatar(IScenePresence presence) { Util.FireAndForget(delegate { IAvatarAppearanceModule appearance = presence.RequestModuleInterface<IAvatarAppearanceModule>(); if (null == appearance || null == appearance.Appearance) { MainConsole.Instance.WarnFormat("[ATTACHMENT]: Appearance has not been initialized for agent {0}", presence.UUID); return; } //Create the avatar attachments plugin for the av AvatarAttachments attachmentsPlugin = new AvatarAttachments(presence); presence.RegisterModuleInterface(attachmentsPlugin); List<AvatarAttachment> attachments = appearance.Appearance.GetAttachments(); foreach (AvatarAttachment attach in attachments) { try { RezSingleAttachmentFromInventory(presence.ControllingClient, attach.ItemID, attach.AssetID, 0, false); } catch (Exception e) { MainConsole.Instance.ErrorFormat("[ATTACHMENT]: Unable to rez attachment: {0}", e); } } presence.AttachmentsLoaded = true; lock (_usersToSendAttachmentsToWhenLoaded) { if (_usersToSendAttachmentsToWhenLoaded.ContainsKey(presence.UUID)) { foreach (var id in _usersToSendAttachmentsToWhenLoaded[presence.UUID]) { SendAttachmentsToPresence(id, presence); } _usersToSendAttachmentsToWhenLoaded.Remove(presence.UUID); } } }); } public void SendAttachmentsToPresence(IScenePresence receiver, IScenePresence sender) { if (sender.AttachmentsLoaded) { ISceneEntity[] entities = GetAttachmentsForAvatar(sender.UUID); foreach (ISceneEntity entity in entities) { receiver.SceneViewer.QueuePartForUpdate(entity.RootChild, PrimUpdateFlags.ForcedFullUpdate); #if (!ISWIN) foreach (ISceneChildEntity child in entity.ChildrenEntities()) { if (!child.IsRoot) { receiver.SceneViewer.QueuePartForUpdate(child, PrimUpdateFlags.ForcedFullUpdate); } } #else foreach (ISceneChildEntity child in entity.ChildrenEntities().Where(child => !child.IsRoot)) { receiver.SceneViewer.QueuePartForUpdate(child, PrimUpdateFlags.ForcedFullUpdate); } #endif } } else { lock (_usersToSendAttachmentsToWhenLoaded) if (_usersToSendAttachmentsToWhenLoaded.ContainsKey(sender.UUID)) _usersToSendAttachmentsToWhenLoaded[sender.UUID].Add(receiver); else _usersToSendAttachmentsToWhenLoaded.Add(sender.UUID, new List<IScenePresence>() { receiver }); } } public void SuspendAvatar(IScenePresence presence, OpenSim.Services.Interfaces.GridRegion destination) { presence.AttachmentsLoaded = false; ISceneEntity[] attachments = GetAttachmentsForAvatar(presence.UUID); foreach (ISceneEntity group in attachments) { if (group.RootChild.AttachedPos != group.RootChild.SavedAttachedPos || group.RootChild.SavedAttachmentPoint != group.RootChild.AttachmentPoint) { group.RootChild.SavedAttachedPos = group.RootChild.AttachedPos; group.RootChild.SavedAttachmentPoint = group.RootChild.AttachmentPoint; //Make sure we get updated group.HasGroupChanged = true; } // If an item contains scripts, it's always changed. 
// This ensures script state is saved on detach foreach (ISceneChildEntity p in group.ChildrenEntities()) { if (p.Inventory.ContainsScripts()) { group.HasGroupChanged = true; break; } } if (group.HasGroupChanged) { UUID assetID = UpdateKnownItem(presence.ControllingClient, group, group.RootChild.FromUserInventoryItemID, group.OwnerID); group.RootChild.FromUserInventoryAssetID = assetID; } } IAvatarAppearanceModule appearance = presence.RequestModuleInterface<IAvatarAppearanceModule>(); if (appearance != null) appearance.Appearance.SetAttachments(attachments); IBackupModule backup = presence.Scene.RequestModuleInterface<IBackupModule>(); if (backup != null) { bool sendUpdates = destination == null; if (!sendUpdates) { List<OpenSim.Services.Interfaces.GridRegion> regions = presence.Scene.RequestModuleInterface<IGridRegisterModule>().GetNeighbors(presence.Scene); regions.RemoveAll((r) => r.RegionID != destination.RegionID); sendUpdates = regions.Count == 0; } backup.DeleteSceneObjects(attachments, false, sendUpdates); } } #endregion #region Client Events #region Subscribing to Client Events protected void SubscribeToClientEvents(IClientAPI client) { client.OnRezSingleAttachmentFromInv += ClientRezSingleAttachmentFromInventory; client.OnObjectAttach += ClientAttachObject; client.OnObjectDetach += ClientDetachObject; client.OnObjectDrop += ClientDropObject; client.OnDetachAttachmentIntoInv += DetachSingleAttachmentToInventory; client.OnUpdatePrimGroupPosition += ClientUpdateAttachmentPosition; } protected void UnsubscribeFromClientEvents(IClientAPI client) { client.OnRezSingleAttachmentFromInv -= ClientRezSingleAttachmentFromInventory; client.OnObjectAttach -= ClientAttachObject; client.OnObjectDetach -= ClientDetachObject; client.OnObjectDrop -= ClientDropObject; client.OnDetachAttachmentIntoInv -= DetachSingleAttachmentToInventory; client.OnUpdatePrimGroupPosition -= ClientUpdateAttachmentPosition; } #endregion protected UUID ClientRezSingleAttachmentFromInventory( IClientAPI remoteClient, UUID itemID, int AttachmentPt) { IScenePresence presence = m_scene.GetScenePresence(remoteClient.AgentId); if (presence != null && presence.SuccessfullyMadeRootAgent) { ISceneEntity att = RezSingleAttachmentFromInventory(remoteClient, itemID, UUID.Zero, AttachmentPt, true); if (null == att) return UUID.Zero; return att.UUID; } return UUID.Zero; } protected void ClientDetachObject(uint objectLocalID, IClientAPI remoteClient) { ISceneEntity group = m_scene.GetGroupByPrim(objectLocalID); if (group != null) { //group.DetachToGround(); DetachSingleAttachmentToInventory(group.RootChild.FromUserInventoryItemID, remoteClient); } else SendKillEntity(new SceneObjectPart { LocalId = objectLocalID }); } protected void ClientDropObject(uint objectLocalID, IClientAPI remoteClient) { ISceneEntity group = m_scene.GetGroupByPrim (objectLocalID); if (group != null) DetachSingleAttachmentToGround(group.UUID, remoteClient); } protected void ClientAttachObject(IClientAPI remoteClient, uint objectLocalID, int AttachmentPt, bool silent) { MainConsole.Instance.Debug("[ATTACHMENTS MODULE]: Invoking AttachObject"); try { // If we can't take it, we can't attach it! 
ISceneChildEntity part = m_scene.GetSceneObjectPart (objectLocalID); if (part == null) return; // Calls attach with a Zero position AttachObjectFromInworldObject(objectLocalID, remoteClient, part.ParentEntity, AttachmentPt); } catch (Exception e) { MainConsole.Instance.DebugFormat("[ATTACHMENTS MODULE]: exception upon Attach Object {0}", e); } } protected void ClientUpdateAttachmentPosition(uint objectLocalID, Vector3 pos, IClientAPI remoteClient, bool SaveUpdate) { ISceneEntity group = m_scene.GetGroupByPrim (objectLocalID); if (group != null) { if (group.IsAttachment || (group.RootChild.Shape.PCode == 9 && group.RootChild.Shape.State != 0)) { //Move has edit permission as well if (m_scene.Permissions.CanMoveObject(group.UUID, remoteClient.AgentId)) { //Only deal with attachments! UpdateAttachmentPosition(remoteClient, group, objectLocalID, pos); } } } } #endregion #region Public Methods #region Attach public bool AttachObjectFromInworldObject(uint localID, IClientAPI remoteClient, ISceneEntity group, int AttachmentPt) { if (m_scene.Permissions.CanTakeObject(group.UUID, remoteClient.AgentId)) FindAttachmentPoint(remoteClient, localID, group, AttachmentPt, UUID.Zero, true); else { remoteClient.SendAgentAlertMessage( "You don't have sufficient permissions to attach this object", false); return false; } return true; } public ISceneEntity RezSingleAttachmentFromInventory ( IClientAPI remoteClient, UUID itemID, UUID assetID, int AttachmentPt, bool updateUUIDs) { MainConsole.Instance.DebugFormat( "[ATTACHMENTS MODULE]: Rezzing attachment to point {0} from item {1} for {2}", (AttachmentPoint)AttachmentPt, itemID, remoteClient.Name); IInventoryAccessModule invAccess = m_scene.RequestModuleInterface<IInventoryAccessModule>(); if (invAccess != null) { InventoryItemBase item = null; SceneObjectGroup objatt = assetID == UUID.Zero ? invAccess.CreateObjectFromInventory(remoteClient, itemID, out item) : invAccess.CreateObjectFromInventory(remoteClient, itemID, assetID); if (objatt != null) { #region Set up object for attachment status if (item != null) { assetID = item.AssetID; // Since renaming the item in the inventory does not affect the name stored // in the serialization, transfer the correct name from the inventory to the // object itself before we rez. 
objatt.RootPart.Name = item.Name; objatt.RootPart.Description = item.Description; } objatt.RootPart.Flags |= PrimFlags.Phantom; objatt.RootPart.IsAttachment = true; objatt.SetFromItemID(itemID, assetID); List<SceneObjectPart> partList = new List<SceneObjectPart>(objatt.ChildrenList); foreach(SceneObjectPart part in partList) part.AttachedAvatar = remoteClient.AgentId; objatt.SetGroup(remoteClient.ActiveGroupId, remoteClient.AgentId, false); if (objatt.RootPart.OwnerID != remoteClient.AgentId) { //Need to kill the for sale here objatt.RootPart.ObjectSaleType = 0; objatt.RootPart.SalePrice = 10; if (m_scene.Permissions.PropagatePermissions()) { if (item == null) { item = new InventoryItemBase(itemID, remoteClient.AgentId); item = m_scene.InventoryService.GetItem(item); } if (item == null) return null; if ((item.CurrentPermissions & 8) != 0) { foreach (SceneObjectPart part in partList) { part.EveryoneMask = item.EveryOnePermissions; part.NextOwnerMask = item.NextPermissions; part.GroupMask = 0; // DO NOT propagate here } } objatt.ApplyNextOwnerPermissions(); } } foreach (SceneObjectPart part in partList) { if (part.OwnerID != remoteClient.AgentId) { part.LastOwnerID = part.OwnerID; part.OwnerID = remoteClient.AgentId; part.Inventory.ChangeInventoryOwner(remoteClient.AgentId); } } objatt.RootPart.TrimPermissions(); objatt.RootPart.IsAttachment = true; objatt.IsDeleted = false; //Update the ItemID with the new item objatt.SetFromItemID(itemID, assetID); //DO NOT SEND THIS KILL ENTITY // If we send this, when someone copies an inworld object, then wears it, the inworld objects disapepars // If a bug is caused by this, we need to figure out some other workaround. //SendKillEntity(objatt.RootChild); //We also have to reset the IDs so that it doesn't have the same IDs as one inworld (possibly)! 
ISceneEntity[] atts = GetAttachmentsForAvatar(remoteClient.AgentId); foreach (var obj in atts) if (obj.UUID == objatt.UUID) updateUUIDs = false; bool forceUpdateOnNextDeattach = false; try { if (updateUUIDs) { m_scene.SceneGraph.AddPrimToScene(objatt); forceUpdateOnNextDeattach = true;//If the user has information stored about this object, we need to force updating next time } else { foreach (var prim in objatt.ChildrenEntities()) { prim.LocalId = 0; } bool success = m_scene.SceneGraph.RestorePrimToScene(objatt, true); if (!success) MainConsole.Instance.Error("[AttachmentModule]: Failed to add attachment " + objatt.Name + " for user " + remoteClient.Name + "!"); } } catch { } //If we updated the attachment, we need to save the change IScenePresence presence = m_scene.GetScenePresence(remoteClient.AgentId); if (presence != null) { FindAttachmentPoint(remoteClient, objatt.LocalId, objatt, AttachmentPt, assetID, forceUpdateOnNextDeattach); } else objatt = null;//Presence left, kill the attachment #endregion } else { MainConsole.Instance.WarnFormat( "[ATTACHMENTS MODULE]: Could not retrieve item {0} for attaching to avatar {1} at point {2}", itemID, remoteClient.Name, AttachmentPt); } return objatt; } return null; } #endregion #region Detach public void DetachSingleAttachmentToInventory (UUID itemID, IClientAPI remoteClient) { ISceneEntity[] attachments = GetAttachmentsForAvatar (remoteClient.AgentId); IScenePresence presence; if (m_scene.TryGetScenePresence (remoteClient.AgentId, out presence)) { IAvatarAppearanceModule appearance = presence.RequestModuleInterface<IAvatarAppearanceModule> (); if (!appearance.Appearance.DetachAttachment(itemID)) { bool found = false; foreach (ISceneEntity grp in attachments) { if (grp.RootChild.FromUserInventoryItemID == itemID) found = true; } if (!found) return; //Its not attached! What are we doing! 
} MainConsole.Instance.Debug ("[ATTACHMENTS MODULE]: Detaching from UserID: " + remoteClient.AgentId + ", ItemID: " + itemID); if (AvatarFactory != null) AvatarFactory.QueueAppearanceSave (remoteClient.AgentId); } DetachSingleAttachmentToInventoryInternal (itemID, remoteClient, true); //Find the attachment we are trying to edit by ItemID foreach (ISceneEntity grp in attachments) { if (grp.RootChild.FromUserInventoryItemID == itemID) { //And from storage as well IBackupModule backup = presence.Scene.RequestModuleInterface<IBackupModule> (); if (backup != null) backup.DeleteSceneObjects (new[]{grp}, false, true); } } } public void DetachSingleAttachmentToGround(UUID itemID, IClientAPI remoteClient) { ISceneChildEntity part = m_scene.GetSceneObjectPart (itemID); if (part == null || part.ParentEntity == null) return; if (part.ParentEntity.RootChild.AttachedAvatar != remoteClient.AgentId) return; UUID inventoryID = part.ParentEntity.RootChild.FromUserInventoryItemID; IScenePresence presence; if (m_scene.TryGetScenePresence(remoteClient.AgentId, out presence)) { string reason; if (!m_scene.Permissions.CanRezObject( part.ParentEntity.PrimCount, remoteClient.AgentId, presence.AbsolutePosition, out reason)) return; IAvatarAppearanceModule appearance = presence.RequestModuleInterface<IAvatarAppearanceModule> (); appearance.Appearance.DetachAttachment (itemID); AvatarFactory.QueueAppearanceSave(remoteClient.AgentId); part.ParentEntity.DetachToGround(); List<UUID> uuids = new List<UUID> {inventoryID}; m_scene.InventoryService.DeleteItems(remoteClient.AgentId, uuids); remoteClient.SendRemoveInventoryItem(inventoryID); } m_scene.EventManager.TriggerOnAttach(part.ParentEntity.LocalId, itemID, UUID.Zero); } #endregion #region Get/Update/Send /// <summary> /// Update the position of the given attachment /// </summary> /// <param name="client"></param> /// <param name="ItemID"></param> /// <param name="pos"></param> public void UpdateAttachmentPosition(IClientAPI client, ISceneEntity sog, uint localID, Vector3 pos) { if (sog != null) { // If this is an attachment, then we need to save the modified // object back into the avatar's inventory. First we save the // attachment point information, then we update the relative // positioning (which caused this method to get driven in the // first place. Then we have to mark the object as NOT an // attachment. This is necessary in order to correctly save // and retrieve GroupPosition information for the attachment. // Then we save the asset back into the appropriate inventory // entry. Finally, we restore the object's attachment status. 
byte attachmentPoint = (byte)sog.RootChild.AttachmentPoint; sog.UpdateGroupPosition(pos, true); sog.RootChild.AttachedPos = pos; sog.RootChild.FixOffsetPosition((pos), false); //sog.AbsolutePosition = sog.RootChild.AttachedPos; sog.SetAttachmentPoint(attachmentPoint); sog.ScheduleGroupUpdate(PrimUpdateFlags.TerseUpdate); //Don't update right now, wait until logout //UpdateKnownItem(client, sog, sog.GetFromItemID(), sog.OwnerID); } else { MainConsole.Instance.Warn("[Attachments]: Could not find attachment by ItemID!"); } } /// <summary> /// Get all of the attachments for the given avatar /// </summary> /// <param name="avatarID">The avatar whose attachments will be returned</param> /// <returns>The avatar's attachments as SceneObjectGroups</returns> public ISceneEntity[] GetAttachmentsForAvatar (UUID avatarID) { ISceneEntity[] attachments = new ISceneEntity[0]; IScenePresence presence = m_scene.GetScenePresence (avatarID); if (presence != null) { AvatarAttachments attPlugin = presence.RequestModuleInterface<AvatarAttachments>(); if (attPlugin != null) attachments = attPlugin.Get(); } return attachments; } /// <summary> /// Send a script event to this scene presence's attachments /// </summary> /// <param name="avatarID">The avatar to fire the event for</param> /// <param name="eventName">The name of the event</param> /// <param name="args">The arguments for the event</param> public void SendScriptEventToAttachments(UUID avatarID, string eventName, Object[] args) { ISceneEntity[] attachments = GetAttachmentsForAvatar (avatarID); IScriptModule[] scriptEngines = m_scene.RequestModuleInterfaces<IScriptModule>(); foreach (ISceneEntity grp in attachments) { foreach (IScriptModule m in scriptEngines) { if (m == null) // No script engine loaded continue; m.PostObjectEvent(grp.UUID, eventName, args); } } } #endregion #endregion #region Internal Methods /// <summary> /// Attach the object to the avatar /// </summary> /// <param name="remoteClient">The client that is having the attachment done</param> /// <param name="localID">The localID (SceneObjectPart) that is being attached (for the attach script event)</param> /// <param name="group">The group (SceneObjectGroup) that is being attached</param> /// <param name="AttachmentPt">The point to where the attachment will go</param> /// <param name="item">If this is not null, it saves a query in this method to the InventoryService /// This is the Item that the object is in (if it is in one yet)</param> /// <param name="assetID"/> /// <param name="forceUpdatePrim">Force updating of the prim the next time the user attempts to deattach it</param> protected void FindAttachmentPoint (IClientAPI remoteClient, uint localID, ISceneEntity group, int AttachmentPt, UUID assetID, bool forceUpdatePrim) { //Make sure that we arn't over the limit of attachments ISceneEntity[] attachments = GetAttachmentsForAvatar (remoteClient.AgentId); if (attachments.Length + 1 > m_maxNumberOfAttachments) { //Too many remoteClient.SendAgentAlertMessage( "You are wearing too many attachments. Take one off to attach this object", false); return; } Vector3 attachPos = group.GetAttachmentPos(); bool hasMultipleAttachmentsSet = (AttachmentPt & 0x7f) != 0 || AttachmentPt == 0; if(!m_allowMultipleAttachments) hasMultipleAttachmentsSet = false; AttachmentPt &= 0x7f; //Disable it! Its evil! //Did the attachment change position or attachment point? 
bool changedPositionPoint = false; // If the attachment point isn't the same as the one previously used // set it's offset position = 0 so that it appears on the attachment point // and not in a weird location somewhere unknown. //Simplier terms: the attachment point changed, set it to the default 0,0,0 location if (AttachmentPt != 0 && AttachmentPt != (group.GetAttachmentPoint() & 0x7f)) { attachPos = Vector3.Zero; changedPositionPoint = true; } else { // AttachmentPt 0 means the client chose to 'wear' the attachment. if (AttachmentPt == 0) { // Check object for stored attachment point AttachmentPt = group.GetSavedAttachmentPoint() & 0x7f; attachPos = group.GetAttachmentPos(); } //Check state afterwards... use the newer GetSavedAttachmentPoint and Pos above first if (AttachmentPt == 0) { // Check object for older stored attachment point AttachmentPt = group.RootChild.Shape.State & 0x7f; //attachPos = group.AbsolutePosition; } // if we still didn't find a suitable attachment point, force it to the default //This happens on the first time an avatar 'wears' an object if (AttachmentPt == 0) { // Stick it on right hand with Zero Offset from the attachment point. AttachmentPt = (int)AttachmentPoint.RightHand; //Default location attachPos = Vector3.Zero; changedPositionPoint = true; } } MainConsole.Instance.InfoFormat( "[ATTACHMENTS MODULE]: Retrieved single object {0} for attachment to {1} on point {2} localID {3}", group.Name, remoteClient.Name, AttachmentPt, group.LocalId); //Update where we are put group.SetAttachmentPoint((byte)AttachmentPt); //Fix the position with the one we found group.AbsolutePosition = attachPos; // Remove any previous attachments IScenePresence presence = m_scene.GetScenePresence (remoteClient.AgentId); if (presence == null) return; UUID itemID = UUID.Zero; //Check for multiple attachment bits and whether we should remove the old if(!hasMultipleAttachmentsSet) { foreach (ISceneEntity grp in attachments) { if (grp.GetAttachmentPoint() == (byte)AttachmentPt) { itemID = grp.RootChild.FromUserInventoryItemID; break; } } if (itemID != UUID.Zero) DetachSingleAttachmentToInventory(itemID, remoteClient); } itemID = group.RootChild.FromUserInventoryItemID; group.RootChild.AttachedAvatar = presence.UUID; List<ISceneChildEntity> parts = group.ChildrenEntities(); foreach (ISceneChildEntity t in parts) t.AttachedAvatar = presence.UUID; if (group.RootChild.PhysActor != null) { m_scene.PhysicsScene.DeletePrim (group.RootChild.PhysActor); group.RootChild.PhysActor = null; } group.RootChild.AttachedPos = attachPos; group.RootChild.IsAttachment = true; group.AbsolutePosition = attachPos; group.RootChild.SetParentLocalId (presence.LocalId); group.SetAttachmentPoint(Convert.ToByte(AttachmentPt)); AvatarAttachments attPlugin = presence.RequestModuleInterface<AvatarAttachments>(); if (attPlugin != null) { attPlugin.AddAttachment (group); presence.SetAttachments(attPlugin.Get()); IAvatarAppearanceModule appearance = presence.RequestModuleInterface<IAvatarAppearanceModule>(); bool save = appearance.Appearance.CheckWhetherAttachmentChanged(AttachmentPt, itemID, assetID); if (save) { appearance.Appearance.SetAttachments(attPlugin.Get()); AvatarFactory.QueueAppearanceSave(remoteClient.AgentId); } } // Killing it here will cause the client to deselect it // It then reappears on the avatar, deselected // through the full update below // if (group.IsSelected) { foreach (ISceneChildEntity part in group.ChildrenEntities()) { part.CreateSelected = true; } } //NOTE: This MUST be here, otherwise we 
limit full updates during attachments when they are selected and it will block the first update. // So until that is changed, this MUST stay. The client will instantly reselect it, so this value doesn't stay borked for long. group.IsSelected = false; if (itemID == UUID.Zero) { //Delete the object inworld to inventory List<ISceneEntity> groups = new List<ISceneEntity> (1) { group }; IInventoryAccessModule inventoryAccess = m_scene.RequestModuleInterface<IInventoryAccessModule>(); if (inventoryAccess != null) inventoryAccess.DeleteToInventory(DeRezAction.AcquireToUserInventory, UUID.Zero, groups, remoteClient.AgentId, out itemID); } else { //it came from an item, we need to start the scripts // Fire after attach, so we don't get messy perms dialogs // 4 == AttachedRez group.CreateScriptInstances(0, true, StateSource.AttachedRez, UUID.Zero); } if (UUID.Zero == itemID) { MainConsole.Instance.Error("[ATTACHMENTS MODULE]: Unable to save attachment. Error inventory item ID."); remoteClient.SendAgentAlertMessage( "Unable to save attachment. Error inventory item ID.", false); return; } // XXYY!! if (assetID == UUID.Zero) { InventoryItemBase item = new InventoryItemBase(itemID, remoteClient.AgentId); item = m_scene.InventoryService.GetItem(item); //Update the ItemID with the new item group.SetFromItemID(itemID, item.AssetID); //If we updated the attachment, we need to save the change IAvatarAppearanceModule appearance = presence.RequestModuleInterface<IAvatarAppearanceModule>(); if (appearance.Appearance.SetAttachment(AttachmentPt, itemID, item.AssetID)) AvatarFactory.QueueAppearanceSave(remoteClient.AgentId); } // In case it is later dropped again, don't let // it get cleaned up group.RootChild.RemFlag(PrimFlags.TemporaryOnRez); group.HasGroupChanged = changedPositionPoint || forceUpdatePrim; //Now recreate it so that it is selected group.ScheduleGroupUpdate(PrimUpdateFlags.ForcedFullUpdate); m_scene.EventManager.TriggerOnAttach(localID, group.RootChild.FromUserInventoryItemID, remoteClient.AgentId); } protected void SendKillEntity(ISceneChildEntity rootPart) { #if (!ISWIN) m_scene.ForEachClient(delegate(IClientAPI client) { client.SendKillObject(m_scene.RegionInfo.RegionHandle, new IEntity[] { rootPart }); }); #else m_scene.ForEachClient( client => client.SendKillObject(m_scene.RegionInfo.RegionHandle, new IEntity[] {rootPart})); #endif } // What makes this method odd and unique is it tries to detach using an UUID.... Yay for standards. // To LocalId or UUID, *THAT* is the question. How now Brown UUID?? protected void DetachSingleAttachmentToInventoryInternal(UUID itemID, IClientAPI remoteClient, bool fireEvent) { if (itemID == UUID.Zero) // If this happened, someone made a mistake.... 
return; // We can NOT use the dictionaries here, as we are looking // for an entity by the fromAssetID, which is NOT the prim UUID ISceneEntity[] attachments = GetAttachmentsForAvatar (remoteClient.AgentId); foreach (ISceneEntity group in attachments) { if (group.RootChild.FromUserInventoryItemID == itemID) { DetachSingleAttachmentGroupToInventoryInternal (itemID, remoteClient, fireEvent, group); return; } } } private void DetachSingleAttachmentGroupToInventoryInternal (UUID itemID, IClientAPI remoteClient, bool fireEvent, ISceneEntity group) { if (fireEvent) { m_scene.EventManager.TriggerOnAttach (group.LocalId, itemID, UUID.Zero); group.DetachToInventoryPrep (); } IScenePresence presence = m_scene.GetScenePresence (remoteClient.AgentId); if (presence != null) { AvatarAttachments attModule = presence.RequestModuleInterface<AvatarAttachments> (); if (attModule != null) attModule.RemoveAttachment (group); if (attModule != null) presence.SetAttachments(attModule.Get()); } MainConsole.Instance.Debug ("[ATTACHMENTS MODULE]: Saving attachpoint: " + ((uint)group.GetAttachmentPoint ()).ToString ()); //Update the saved attach points if (group.RootChild.AttachedPos != group.RootChild.SavedAttachedPos || group.RootChild.SavedAttachmentPoint != group.RootChild.AttachmentPoint) { group.RootChild.SavedAttachedPos = group.RootChild.AttachedPos; group.RootChild.SavedAttachmentPoint = group.RootChild.AttachmentPoint; //Make sure we get updated group.HasGroupChanged = true; } // If an item contains scripts, it's always changed. // This ensures script state is saved on detach foreach (ISceneChildEntity p in group.ChildrenEntities ()) { if (p.Inventory.ContainsScripts ()) { group.HasGroupChanged = true; break; } } if(group.HasGroupChanged) UpdateKnownItem (remoteClient, group, group.RootChild.FromUserInventoryItemID, group.OwnerID); } /// <summary> /// Update the attachment asset for the new sog details if they have changed. /// </summary> /// /// This is essential for preserving attachment attributes such as permission. Unlike normal scene objects, /// these details are not stored on the region. /// /// <param name="remoteClient"></param> /// <param name="grp"></param> /// <param name="itemID"></param> /// <param name="agentID"></param> protected UUID UpdateKnownItem (IClientAPI remoteClient, ISceneEntity grp, UUID itemID, UUID agentID) { if (grp != null) { if (!grp.HasGroupChanged) { //MainConsole.Instance.WarnFormat("[ATTACHMENTS MODULE]: Save request for {0} which is unchanged", grp.UUID); return UUID.Zero; } //let things like state saves and another async things be performed before we serialize the object grp.BackupPreparation(); MainConsole.Instance.InfoFormat( "[ATTACHMENTS MODULE]: Updating asset for attachment {0}, attachpoint {1}", grp.UUID, grp.GetAttachmentPoint()); string sceneObjectXml = SceneObjectSerializer.ToOriginalXmlFormat((SceneObjectGroup)grp); AssetBase asset = new AssetBase(UUID.Random(), grp.Name, AssetType.Object, remoteClient.AgentId) { Description = grp.RootChild.Description, Data = Utils.StringToBytes(sceneObjectXml) }; asset.ID = m_scene.AssetService.Store(asset); m_scene.InventoryService.UpdateAssetIDForItem(itemID, asset.ID); // this gets called when the agent logs off! 
//remoteClient.SendInventoryItemCreateUpdate(item, 0); return asset.ID; } return UUID.Zero; } #endregion #region Per Presence Attachment Module private class AvatarAttachments { private readonly List<ISceneEntity> m_attachments = new List<ISceneEntity> (); public AvatarAttachments (IScenePresence SP) { } public void AddAttachment (ISceneEntity attachment) { lock (m_attachments) { m_attachments.RemoveAll((a) => attachment.UUID == a.UUID); m_attachments.Add(attachment); } } public void RemoveAttachment (ISceneEntity attachment) { lock (m_attachments) { m_attachments.RemoveAll((a) => attachment.UUID == a.UUID); } } public ISceneEntity[] Get () { lock (m_attachments) { /* Allocate inside the lock so the list cannot change size between sizing the array and copying into it */ ISceneEntity[] attachments = new ISceneEntity[m_attachments.Count]; m_attachments.CopyTo(attachments); return attachments; } } } #endregion } }
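// Usage sketch (not part of the module above): how the per-presence AvatarAttachments
// helper is kept in sync with an IScenePresence, mirroring the attach/detach paths in
// this module. It is written as if it were another private method of the attachments
// module, since AvatarAttachments is a private nested class; the method name
// SyncAttachmentList and the 'attaching' flag are illustrative assumptions.
private void SyncAttachmentList(IScenePresence presence, ISceneEntity group, bool attaching)
{
    AvatarAttachments attModule = presence.RequestModuleInterface<AvatarAttachments>();
    if (attModule == null)
        return; // no attachment tracker registered for this presence

    if (attaching)
        attModule.AddAttachment(group);   // AddAttachment drops any stale entry with the same UUID first
    else
        attModule.RemoveAttachment(group);

    // Push the flattened copy back so the presence (and therefore the viewer)
    // sees the current attachment list.
    presence.SetAttachments(attModule.Get());
}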
using System; using UnityEngine; using System.Collections.Generic; using System.Linq; using HandyGestures; public class HandyDetector : MonoBehaviour { #region Helper enums & editor variables public enum Gesture { Press, LongPress, Tap, Pan, Fling } public enum CollisionMode { Mode3D, Mode2D } public enum CollisionMethodType { Raycast, Spherecast, Custom, } public enum CollisionMethodType2D { Point, Circle, Custom } public delegate RaycastHit[] CollisionFunction(Vector3 position,float distance,int mask); public delegate Collider2D[] CollisionFunction2D( Vector3 position,int mask,float minDepth,float maxDepth); #endregion #region settings public bool DisableCasting; public CollisionMode CastMode; public CollisionMethodType CollisionMethod; public CollisionMethodType2D CollisionMethod2D; public CollisionFunctor CustomFunction; public CollisionFunctor2D CustomFunction2D; CollisionFunction _function; CollisionFunction2D _function2D; public float longTime = 0.2f; public float flingTime = 0.2f; public Transform interceptor; public Transform defaultObject; public int layerMask = -1; public float castDistance = Mathf.Infinity; public float maxDepth = Mathf.Infinity; public float minDepth = Mathf.NegativeInfinity; public float circleRadius = 16; public float sphereRadius = 1; public float moveThreshold = 1; // Analysis disable once FieldCanBeMadeReadOnly.Local #endregion Device _device; InputState _activeState; InputState _idle; Vector2 _startPos; Vector2 _prevPos; Vector2 _deltaMove; List<Transform> _objects; Gesture _prevType; IGesture _target; void Reset() { _target = null; //_prevType = Gesture.Press; if (_activeState != null && _activeState != _idle) { _idle.Activate(); } _activeState = _idle; } bool moved() { return _deltaMove.magnitude >= moveThreshold; } #region Start & Setup States void Start() { defaultThreshold = moveThreshold; Grouping.GroupManager.main.group["Main Menu"].Add(this, new Grouping.GroupDelegator(null, GameMenuEnter, GameMenuExit)); Grouping.GroupManager.main.group["Level Select"].Add(this, new Grouping.GroupDelegator(null, GameMenuEnter, GameMenuExit)); _idle = _activeState = new InputState(this); InputState deadState = new InputState(this); InputState downState = new InputState(this); InputState panState = new InputState(this); InputState longState = new InputState(this); InputState slideState = new InputState(this); deadState.Up += Reset; deadState.Interrupt += Reset; _idle.Down += delegate { _startPos = _device.pos1; _objects = GetAllObjects(_device.pos1); var args1 = new PressArgs(Gesture.Press, _startPos); _target = DoGesture<IPress, PressArgs>(_objects, (t, a) => t.OnGesturePress(a), args1); _prevType = Gesture.Press; downState.Activate(); }; downState.Up += delegate { if (downState.time < longTime) { TapArgs args = new TapArgs(Gesture.Tap, _startPos); if (_target != null) { ITap tap = _target as ITap; if (IsEnabled(tap as Behaviour)) { _prevType = Gesture.Tap; tap.OnGestureTap(args); } } else { _prevType = Gesture.Tap; DoGesture<ITap, TapArgs>(_objects, (t, a) => t.OnGestureTap(a), args); } } Reset(); }; downState.Moved += delegate { slideState.Activate(); }; downState.Hold += delegate { if (downState.time > longTime) { LongPressArgs largs = new LongPressArgs(Gesture.Press, LongPressArgs.State.Down, downState.startPos); PanArgs pArgs = new PanArgs(Gesture.Press, PanArgs.State.Down, downState.startPos, downState.startPos, Vector2.zero); IPan pan; ILongPress lpress; if (_target != null) { lpress = _target as ILongPress; bool handled = false; if (IsEnabled(lpress as 
Behaviour)) { lpress.OnGestureLongPress(largs); handled = largs.handled; _prevType = Gesture.LongPress; longState.Activate(); } if (!handled) { pan = _target as IPan; if (IsEnabled(pan as Behaviour)) { pan.OnGesturePan(pArgs); if (pArgs.handled) { panState.Activate(); } else { deadState.Activate(); } } } } else { foreach (var item in _objects) { bool flag = false; var lpresses = item.GetComponents(typeof(ILongPress)); foreach (var component in lpresses) { lpress = component as ILongPress; if (IsEnabled(lpress as Behaviour)) { lpress.OnGestureLongPress(largs); if (largs.handled) { _target = lpress; _prevType = Gesture.LongPress; longState.Activate(); flag = true; break; } } } if (flag) { break; } var pans = item.GetComponents(typeof(IPan)); foreach (var component in pans) { pan = component as IPan; pan = component as IPan; if (IsEnabled(pan as Behaviour)) { pan.OnGesturePan(pArgs); if (pArgs.handled) { _target = pan; _prevType = Gesture.Pan; panState.Activate(); flag = true; break; } } } if (flag) { break; } } } if (_target == null) { deadState.Activate(); } } }; slideState.Enter += delegate { FlingArgs sArgs = new FlingArgs(Gesture.Press, FlingArgs.State.Start, slideState.startPos, slideState.startPos); PanArgs pArgs = new PanArgs(Gesture.Press, PanArgs.State.Down, slideState.startPos, slideState.startPos, Vector2.zero); IPan pan; IFling fling; if (_target != null) { fling = _target as IFling; bool handled = false; if (IsEnabled(fling as Behaviour)) { fling.OnGestureFling(sArgs); handled = sArgs.handled; if (handled) { //Stay in slide state! _prevType = Gesture.Fling; } } if (!handled) { pan = _target as IPan; if (IsEnabled(pan as Behaviour)) { pan.OnGesturePan(pArgs); if (pArgs.handled) { _prevType = Gesture.Pan; //Change state! panState.Activate(); } } } } else { foreach (var item in _objects) { var flings = item.GetComponents(typeof(IFling)); bool flag = false; foreach (var component in flings) { fling = component as IFling; if (IsEnabled(fling as Behaviour)) { fling.OnGestureFling(sArgs); if (sArgs.handled) { //Stay in slide state! _target = fling; _prevType = Gesture.Fling; flag = true; break; } } } if (flag) { break; } var pans = item.GetComponents(typeof(IPan)); foreach (var component in pans) { pan = component as IPan; if (IsEnabled(pan as Behaviour)) { pan.OnGesturePan(pArgs); if (pArgs.handled) { //Change state! 
_target = pan; _prevType = Gesture.Pan; panState.Activate(); flag = true; break; } } } if (flag) { break; } } } if (_target == null) { deadState.Activate(); } }; slideState.Up += delegate { if (slideState.time <= flingTime) { FlingArgs sArgs = new FlingArgs(_prevType, FlingArgs.State.End, slideState.startPos, _device.pos1); IFling f = _target as IFling; if (IsEnabled(f as Behaviour)) { f.OnGestureFling(sArgs); } } Reset(); }; slideState.Hold += delegate { if (slideState.time > flingTime) { FlingArgs sArgs = new FlingArgs(Gesture.Press, FlingArgs.State.Interrupt, slideState.startPos, _device.pos1); IFling f = _target as IFling; if (IsEnabled(f as Behaviour)) { f.OnGestureFling(sArgs); } IPan p = _target as IPan; bool handled = false; if (IsEnabled(p as Behaviour)) { PanArgs pArgs = new PanArgs(Gesture.Fling, PanArgs.State.Down, _device.pos1, _device.pos1, Vector2.zero); p.OnGesturePan(pArgs); handled = pArgs.handled; } if (!handled) { deadState.Activate(); } else { panState.Activate(); } } }; slideState.Interrupt += delegate { if (slideState.time <= flingTime) { FlingArgs sArgs = new FlingArgs(Gesture.Fling, FlingArgs.State.Interrupt, slideState.startPos, _device.pos1); IFling f = _target as IFling; if (f != null) { f.OnGestureFling(sArgs); } } Reset(); }; longState.Hold += delegate { ILongPress lpress = _target as ILongPress; if (IsEnabled(lpress as Behaviour)) { LongPressArgs args = new LongPressArgs(Gesture.Press, LongPressArgs.State.Hold, longState.startPos); lpress.OnGestureLongPress(args); } }; longState.Up += delegate { ILongPress lpress = _target as ILongPress; if (IsEnabled(lpress as Behaviour)) { LongPressArgs args = new LongPressArgs(Gesture.Press, LongPressArgs.State.Up, longState.startPos); lpress.OnGestureLongPress(args); } Reset(); }; longState.Interrupt += delegate { ILongPress lpress = _target as ILongPress; if (lpress != null) { LongPressArgs args = new LongPressArgs(Gesture.Press, LongPressArgs.State.Interrupt, longState.startPos); lpress.OnGestureLongPress(args); } Reset(); }; longState.Moved += delegate { ILongPress lpress = _target as ILongPress; if (lpress != null && IsEnabled(lpress as Behaviour)) { LongPressArgs args = new LongPressArgs(Gesture.Press, LongPressArgs.State.Interrupt, longState.startPos); lpress.OnGestureLongPress(args); if (!args.handled) { deadState.Activate(); } else { IPan pan = _target as IPan; if (IsEnabled(pan as Behaviour)) { PanArgs pArgs = new PanArgs(Gesture.LongPress, PanArgs.State.Down, _device.pos1, _device.pos1, Vector2.zero); pan.OnGesturePan(pArgs); if (pArgs.handled) { panState.Activate(); } else { deadState.Activate(); } } } } }; panState.Hold += delegate { if (!moved()) { IPan pan = _target as IPan; if (IsEnabled(pan as Behaviour)) { PanArgs pArgs = new PanArgs(Gesture.Pan, PanArgs.State.Hold, panState.startPos, _device.pos1, Vector2.zero); pan.OnGesturePan(pArgs); } } }; panState.Moved += delegate { IPan pan = _target as IPan; if (IsEnabled(pan as Behaviour)) { PanArgs pArgs = new PanArgs(Gesture.Pan, PanArgs.State.Move, panState.startPos, _device.pos1, _deltaMove); pan.OnGesturePan(pArgs); } }; panState.Up += delegate { IPan pan = _target as IPan; if (IsEnabled(pan as Behaviour)) { PanArgs pArgs = new PanArgs(Gesture.Pan, PanArgs.State.Up, panState.startPos, _device.pos1, Vector2.zero); pan.OnGesturePan(pArgs); } Reset(); }; panState.Interrupt += delegate { IPan pan = _target as IPan; if (pan != null) { PanArgs pArgs = new PanArgs(Gesture.Pan, PanArgs.State.Interrupt, panState.startPos, _device.pos1, Vector2.zero); 
pan.OnGesturePan(pArgs); } Reset(); }; Reset(); } #endregion private delegate void GestureAction<T,TA>(T obj,TA args); T DoGesture<T, TA>(Vector2 pos, GestureAction<T, TA> action, TA arg) where TA: TouchArg where T: class, IGesture { return DoGesture(GetAllObjects(pos), action, arg); } static T DoGesture<T, TA>(List<Transform> list, GestureAction<T, TA> action, TA arg) where TA: TouchArg where T: class, IGesture { foreach (var item in list) { var gestures = item.GetComponents(typeof(T)); foreach (var component in gestures) { var gesture = component as T; var behaviour = gesture as Behaviour; if (IsEnabled(behaviour)) { action(gesture, arg); if (arg.handled) { return gesture; } } } } return null; } // Update is called once per frame void Update() { _device.Update(); _deltaMove = _prevPos - _device.pos1; _prevPos = _device.pos1; UpdateCollisionTypes(); if (_target != null && !IsEnabled(_target as Behaviour)) { _activeState.InvokeInterrupt(); Reset(); } if (_device.state1 == Device.DeviceState.Down) { _activeState.InvokeDown(); } if (_device.state1 == Device.DeviceState.Hold) { if (moved()) { _activeState.InvokeMoved(); } else { _activeState.Update(); } } if (_device.state1 == Device.DeviceState.Up) { IGesture gesture = _target; _activeState.InvokeUp(); if (gesture != null) { IFinished finished = gesture as IFinished; if (finished != null) { FinishedArgs args = new FinishedArgs(_prevType, false, _startPos, _device.pos1); finished.OnGestureFinished(args); } } } } Camera getCamera() { //UNDONE: Placeholder? //return camera == null ? Camera.main : camera; return Camera.main; } #region Receiving objects void UpdateCollisionTypes() { if (CastMode == CollisionMode.Mode3D) { switch (CollisionMethod) { case CollisionMethodType.Raycast: _function = Raycast3D; break; case CollisionMethodType.Spherecast: _function = Spherecast3D; break; default: _function = CustomResults; break; } } else { switch (CollisionMethod2D) { case CollisionMethodType2D.Point: _function2D = Raycast2D; break; case CollisionMethodType2D.Circle: _function2D = Circlecast2D; break; default: _function2D = CustomResults2D; break; } } } List<Transform> GetAllObjects(Vector2 pos) { switch (CastMode) { case CollisionMode.Mode2D: return GetAllObjects2D(pos); default: return GetAllObjects3D(new Vector3(pos.x, pos.y, 0)); } } List<Transform> GetAllObjects2D(Vector2 pos) { //var results = Physics2D.RaycastAll(pos, -Vector2.up, castDistance, layerMask); _resList.Clear(); var results = _function2D(pos, layerMask, minDepth, maxDepth); if (results == null) { return _resList; } if (interceptor != null) { _resList.Add(interceptor); } if (!DisableCasting) { _resList.AddRange( from _ in results where (interceptor == null || _.transform != interceptor.transform) && (defaultObject == null || _.transform != defaultObject.transform) orderby _.transform.position.z select _.transform); } //results.Where(_ => _.transform != interceptor && _.transform != defaultObject) // .OrderBy(_ => _.transform.position.z).Select(_ => _.transform)); if (defaultObject != null) { _resList.Add(defaultObject); } return _resList; } static bool IsEnabled(Behaviour b) { return b != null && (b.enabled || b.GetType().IsDefined(typeof(PersistentGestureAttribute), false)); } List<Transform> _resList = new List<Transform>(); List<Transform> GetAllObjects3D(Vector3 pos) { //var results = Physics2D.RaycastAll(pos, -Vector2.up, castDistance, layerMask); _resList.Clear(); var results = _function(pos, castDistance, layerMask); if (results == null) { return _resList; } if (interceptor != 
null) { _resList.Add(interceptor.transform); } if (!DisableCasting) { _resList.AddRange( from _ in results where (interceptor == null || _.transform != interceptor.transform) && (defaultObject == null || _.transform != defaultObject.transform) orderby _.distance select _.transform); } // results.Where(_ => _.transform != interceptor && _.transform != defaultObject) // .OrderBy(_ => _.distance).Select(_ => _.transform)); if (defaultObject != null) { _resList.Add(defaultObject.transform); } return _resList; } RaycastHit[] CustomResults(Vector3 position, float distance, int mask) { if (CustomFunction != null) { return CustomFunction.GetResults(position, distance, mask); } return null; } Collider2D[] CustomResults2D(Vector3 screenPoint, int mask, float min, float max) { if (CustomFunction2D != null) { return CustomFunction2D.GetResults(getCamera().ScreenToWorldPoint(screenPoint).xy(), mask, min, max); } return null; } Collider2D[] Raycast2D(Vector3 screenPoint, int mask, float min, float max) { return Physics2D.OverlapPointAll(getCamera().ScreenToWorldPoint(screenPoint).xy(), mask, min, max); } Collider2D[] Circlecast2D(Vector3 position, int mask, float min, float max) { return Physics2D.OverlapCircleAll(position, circleRadius, mask, min, max); } RaycastHit[] Spherecast3D(Vector3 position, float distance, int mask) { Ray r = getCamera().ScreenPointToRay(position); return Physics.SphereCastAll(r, circleRadius, distance, mask); } RaycastHit[] Raycast3D(Vector3 position, float distance, int mask) { Ray r = getCamera().ScreenPointToRay(position); return Physics.RaycastAll(r, distance, mask); } #endregion #region InputState class InputState { public event Callback Enter; public event Callback Exit; public event Callback Hold; public event Callback Down; public event Callback Up; public event Callback Moved; public event Callback Interrupt; readonly HandyDetector _parent; public float time { get; private set; } public Vector2 startPos { get; private set; } public delegate void Callback(); public void Activate() { if (_parent._activeState.Exit != null) { _parent._activeState.Exit(); } _parent._activeState.time = 0; _parent._activeState = this; startPos = _parent._device.pos1; if (Enter != null) { Enter(); } } public void Update() { time += Time.deltaTime; if (Hold != null) { Hold(); } } public void InvokeInterrupt() { if (Interrupt != null) { Interrupt(); } } public void InvokeDown() { if (Down != null) { Down(); } } public void InvokeMoved() { if (Moved != null) { Moved(); } } public void InvokeUp() { if (Up != null) { Up(); } } public InputState(HandyDetector parent) { _parent = parent; } } #endregion #region Device /// <summary> Abstract mouse and touch input </summary> struct Device { public enum DeviceState { None, Up, Down, Hold, Error } public DeviceState state1 { get ; private set; } public DeviceState state2 { get ; private set; } public Vector2 pos1 { get ; private set; } public Vector2 pos2 { get ; private set; } public static DeviceState FingerState(uint index) { if (Input.touchCount <= index) { return DeviceState.None; } switch (Input.touches[index].phase) { case TouchPhase.Began: return DeviceState.Down; case TouchPhase.Moved: case TouchPhase.Stationary: return DeviceState.Hold; case TouchPhase.Ended: return DeviceState.Up; default: return DeviceState.Error; } } public static int FingerCount() { return Input.touchCount; } public Vector2 FingerPosition(int index) { if (index == 0) { return pos1; } return Input.touches[index].position; } public DeviceState MouseState(int index) { if 
(Input.GetMouseButtonDown(index)) { return DeviceState.Down; } if (Input.GetMouseButton(index)) { return DeviceState.Hold; } if (Input.GetMouseButtonUp(index)) { return DeviceState.Up; } return DeviceState.None; } public void Update() { if (Input.touchCount > 0) { pos1 = Input.touches[0].position; state1 = FingerState(0); if (Input.touchCount == 1) { pos2 = pos1; state2 = DeviceState.None; } else { pos2 = Input.touches[1].position; state2 = FingerState(1); } } else { //Mouse input pos1 = pos2 = new Vector2(Input.mousePosition.x, Input.mousePosition.y); state1 = MouseState(0); state2 = MouseState(1); } } } #endregion float defaultThreshold; void GameMenuEnter() { moveThreshold = 0.0f; } void GameMenuExit() { moveThreshold = defaultThreshold; } }
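// Usage sketch for the detector above: a component that receives gestures implements one
// or more of the HandyGestures interfaces and sits on an object the configured cast can
// hit (e.g. a Collider for the 3D raycast path). Only members visible in this file are
// relied on (OnGestureTap/OnGesturePan and the 'handled' flag); everything else about
// TapArgs/PanArgs is assumed, and the class name DraggableTile is illustrative.
using UnityEngine;
using HandyGestures;

public class DraggableTile : MonoBehaviour, ITap, IPan
{
    public void OnGestureTap(TapArgs args)
    {
        Debug.Log("Tile tapped");
        args.handled = true; // claim the gesture so objects behind this one are skipped
    }

    public void OnGesturePan(PanArgs args)
    {
        // Setting handled on the first pan callback makes this component the
        // detector's _target for the remainder of the gesture.
        args.handled = true;
    }
}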
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Threading; using System.Threading.Tasks; using Hyak.Common; using Microsoft.Azure; using Microsoft.Azure.Management.ApiManagement; using Microsoft.Azure.Management.ApiManagement.SmapiModels; using Newtonsoft.Json.Linq; namespace Microsoft.Azure.Management.ApiManagement { /// <summary> /// Operations for managing Tenant Policy. /// </summary> internal partial class TenantPolicyOperations : IServiceOperations<ApiManagementClient>, ITenantPolicyOperations { /// <summary> /// Initializes a new instance of the TenantPolicyOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> internal TenantPolicyOperations(ApiManagementClient client) { this._client = client; } private ApiManagementClient _client; /// <summary> /// Gets a reference to the /// Microsoft.Azure.Management.ApiManagement.ApiManagementClient. /// </summary> public ApiManagementClient Client { get { return this._client; } } /// <summary> /// Deletes tenant-level policy. /// </summary> /// <param name='resourceGroupName'> /// Required. The name of the resource group. /// </param> /// <param name='serviceName'> /// Required. The name of the Api Management service. /// </param> /// <param name='etag'> /// Required. ETag. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. 
/// </returns> public async Task<AzureOperationResponse> DeleteAsync(string resourceGroupName, string serviceName, string etag, CancellationToken cancellationToken) { // Validate if (resourceGroupName == null) { throw new ArgumentNullException("resourceGroupName"); } if (serviceName == null) { throw new ArgumentNullException("serviceName"); } if (etag == null) { throw new ArgumentNullException("etag"); } // Tracing bool shouldTrace = TracingAdapter.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = TracingAdapter.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("serviceName", serviceName); tracingParameters.Add("etag", etag); TracingAdapter.Enter(invocationId, this, "DeleteAsync", tracingParameters); } // Construct URL string url = ""; url = url + "/subscriptions/"; if (this.Client.Credentials.SubscriptionId != null) { url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId); } url = url + "/resourceGroups/"; url = url + Uri.EscapeDataString(resourceGroupName); url = url + "/providers/"; url = url + "Microsoft.ApiManagement"; url = url + "/service/"; url = url + Uri.EscapeDataString(serviceName); url = url + "/tenant/policy"; List<string> queryParameters = new List<string>(); queryParameters.Add("api-version=2016-07-07"); if (queryParameters.Count > 0) { url = url + "?" + string.Join("&", queryParameters); } string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Delete; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.TryAddWithoutValidation("If-Match", etag); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { TracingAdapter.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { TracingAdapter.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.NoContent) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { TracingAdapter.Error(invocationId, ex); } throw ex; } // Create Result AzureOperationResponse result = null; // Deserialize Response result = new AzureOperationResponse(); result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { TracingAdapter.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { 
httpRequest.Dispose(); } } } /// <summary> /// Gets tenant-level policy. /// </summary> /// <param name='resourceGroupName'> /// Required. The name of the resource group. /// </param> /// <param name='serviceName'> /// Required. The name of the Api Management service. /// </param> /// <param name='format'> /// Required. Format of the policy. Supported formats: /// application/vnd.ms-azure-apim.policy+xml, /// application/vnd.ms-azure-apim.policy.raw+xml /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// The response model for the get policy output operation. /// </returns> public async Task<PolicyGetResponse> GetAsync(string resourceGroupName, string serviceName, string format, CancellationToken cancellationToken) { // Validate if (resourceGroupName == null) { throw new ArgumentNullException("resourceGroupName"); } if (serviceName == null) { throw new ArgumentNullException("serviceName"); } if (format == null) { throw new ArgumentNullException("format"); } // Tracing bool shouldTrace = TracingAdapter.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = TracingAdapter.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("serviceName", serviceName); tracingParameters.Add("format", format); TracingAdapter.Enter(invocationId, this, "GetAsync", tracingParameters); } // Construct URL string url = ""; url = url + "/subscriptions/"; if (this.Client.Credentials.SubscriptionId != null) { url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId); } url = url + "/resourceGroups/"; url = url + Uri.EscapeDataString(resourceGroupName); url = url + "/providers/"; url = url + "Microsoft.ApiManagement"; url = url + "/service/"; url = url + Uri.EscapeDataString(serviceName); url = url + "/tenant/policy"; List<string> queryParameters = new List<string>(); queryParameters.Add("api-version=2016-07-07"); if (queryParameters.Count > 0) { url = url + "?" + string.Join("&", queryParameters); } string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. 
if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Get; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("Accept", format); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { TracingAdapter.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { TracingAdapter.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { TracingAdapter.Error(invocationId, ex); } throw ex; } // Create Result PolicyGetResponse result = null; // Deserialize Response if (statusCode == HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); result = new PolicyGetResponse(); result.PolicyBytes = Encoding.UTF8.GetBytes(responseContent); } result.StatusCode = statusCode; if (httpResponse.Headers.Contains("ETag")) { result.ETag = httpResponse.Headers.GetValues("ETag").FirstOrDefault(); } if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { TracingAdapter.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// Sets tenant-level policy. /// </summary> /// <param name='resourceGroupName'> /// Required. The name of the resource group. /// </param> /// <param name='serviceName'> /// Required. The name of the Api Management service. /// </param> /// <param name='format'> /// Required. Format of the policy. Supported formats: /// application/vnd.ms-azure-apim.policy+xml, /// application/vnd.ms-azure-apim.policy.raw+xml /// </param> /// <param name='policyStream'> /// Required. Policy stream. /// </param> /// <param name='etag'> /// Optional. ETag. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. 
/// </returns> public async Task<AzureOperationResponse> SetAsync(string resourceGroupName, string serviceName, string format, Stream policyStream, string etag, CancellationToken cancellationToken) { // Validate if (resourceGroupName == null) { throw new ArgumentNullException("resourceGroupName"); } if (serviceName == null) { throw new ArgumentNullException("serviceName"); } if (format == null) { throw new ArgumentNullException("format"); } if (policyStream == null) { throw new ArgumentNullException("policyStream"); } // Tracing bool shouldTrace = TracingAdapter.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = TracingAdapter.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("serviceName", serviceName); tracingParameters.Add("format", format); tracingParameters.Add("policyStream", policyStream); tracingParameters.Add("etag", etag); TracingAdapter.Enter(invocationId, this, "SetAsync", tracingParameters); } // Construct URL string url = ""; url = url + "/subscriptions/"; if (this.Client.Credentials.SubscriptionId != null) { url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId); } url = url + "/resourceGroups/"; url = url + Uri.EscapeDataString(resourceGroupName); url = url + "/providers/"; url = url + "Microsoft.ApiManagement"; url = url + "/service/"; url = url + Uri.EscapeDataString(serviceName); url = url + "/tenant/policy"; List<string> queryParameters = new List<string>(); queryParameters.Add("api-version=2016-07-07"); if (queryParameters.Count > 0) { url = url + "?" + string.Join("&", queryParameters); } string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. 
if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Put; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.TryAddWithoutValidation("If-Match", etag); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Serialize Request Stream requestContent = policyStream; httpRequest.Content = new StreamContent(requestContent); httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse(format); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { TracingAdapter.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { TracingAdapter.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.Created && statusCode != HttpStatusCode.NoContent) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { TracingAdapter.Error(invocationId, ex); } throw ex; } // Create Result AzureOperationResponse result = null; // Deserialize Response result = new AzureOperationResponse(); result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { TracingAdapter.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } } }
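// Usage sketch for the generated operations above: a read/write round trip that replays
// the ETag from GetAsync on SetAsync so concurrent edits are rejected. It assumes the
// ApiManagementClient exposes this operations class through a 'TenantPolicy' property
// (defined elsewhere in the SDK); the resource group and service names are placeholders.
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Management.ApiManagement;
using Microsoft.Azure.Management.ApiManagement.SmapiModels;

internal static class TenantPolicyUsageSketch
{
    private const string PolicyFormat = "application/vnd.ms-azure-apim.policy+xml";

    public static async Task RoundTripAsync(ApiManagementClient client)
    {
        PolicyGetResponse current = await client.TenantPolicy.GetAsync(
            "my-resource-group", "my-apim-service", PolicyFormat, CancellationToken.None);

        // Write the (possibly edited) policy document back, guarded by the ETag we just read.
        using (var policyStream = new MemoryStream(current.PolicyBytes))
        {
            await client.TenantPolicy.SetAsync(
                "my-resource-group", "my-apim-service", PolicyFormat, policyStream,
                current.ETag, CancellationToken.None);
        }
    }
}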
using UnityEngine; using System.Collections; using System.Collections.Generic; using System.Text.RegularExpressions; namespace UMA.AssetBundles { /// <summary> /// An AssetBunldeIndex containing a list of assetbundles each with a list of assets inside that asset bundle. The user can customize the fields of data that are stored for each asset. /// The entire class is marked as partial so that extra methods for searching the index can be added as necessary. /// </summary> public partial class AssetBundleIndex : ScriptableObject { /// <summary> /// The actual data that gets added to the index for any given asset. Made partial so the user can add extra fields to this as required; /// </summary> [System.Serializable] public partial class AssetBundleIndexItem { public string filename; public string assetName; public int assetHash; public string assetType; public AssetBundleIndexItem() { } /// <summary> /// Adds data about the given asset object to this given index item. /// Calls AddDataPreProcess before assigning any data and AddDataPostProcess afterwards, both of which are partial classes the user can use if necessary. /// </summary> /// <param name="_filename"></param> /// <param name="obj"></param> public virtual void AddData(string _filename, UnityEngine.Object obj) { AddDataPreProcess(_filename, obj); assetType = obj.GetType().ToString(); //deal with RuntimeAnimatorController Type craziness if (assetType == "UnityEditor.Animations.AnimatorController") { assetType = "UnityEngine.RuntimeAnimatorController"; } filename = _filename; if (assetType == "UMA.OverlayDataAsset" || assetType == "UMA.SlotDataAsset" || assetType == "UMA.RaceData" || assetType == "UMATextRecipe") { if (assetType == "UMA.RaceData") { assetName = (obj as UMA.RaceData).raceName; assetHash = UMA.UMAUtils.StringToHash((obj as UMA.RaceData).raceName); } else if (assetType == "UMA.OverlayDataAsset") { assetName = (obj as UMA.OverlayDataAsset).overlayName; assetHash = UMA.UMAUtils.StringToHash((obj as UMA.OverlayDataAsset).overlayName); } else if (assetType == "UMA.SlotDataAsset") { assetName = (obj as UMA.SlotDataAsset).slotName; assetHash = (obj as UMA.SlotDataAsset).nameHash; } else if (assetType == "UMATextRecipe") { assetName = _filename; assetHash = UMA.UMAUtils.StringToHash(filename); } } else { assetName = _filename; assetHash = UMA.UMAUtils.StringToHash(filename); } AddDataPostProcess(_filename, obj); } /// <summary> /// Impliment this method to run any extra code before data gets added to the index item in AddData /// </summary> /// <param name="filename"></param> /// <param name="obj"></param> partial void AddDataPreProcess(string filename, UnityEngine.Object obj); /// <summary> /// Impliment this method to run any extra code after data has been added to the index item in AddData /// </summary> /// <param name="filename"></param> /// <param name="obj"></param> partial void AddDataPostProcess(string filename, UnityEngine.Object obj); } /// <summary> /// A list of the available assetbundles each conatining a list of all the assets in that bundle. Marked as partial so this can be extended if necessary. 
/// </summary> [System.Serializable] public partial class AssetBundleIndexList { public string assetBundleName; public List<AssetBundleIndexItem> assetBundleAssets = new List<AssetBundleIndexItem>(); public string[] allDependencies; public string[] directDependencies; public string assetBundleHash; public string encryptedName = ""; public AssetBundleIndexList(string _assetBundleName) { assetBundleName = _assetBundleName; } public AssetBundleIndexList(string _assetBundleName, string _encryptedName) { assetBundleName = _assetBundleName; encryptedName = _encryptedName; } /// <summary> /// Adds an AssetBundleIndexItem to the list of assetBundleAssets with the given filename. /// </summary> /// <param name="filename"></param> /// <param name="obj"></param> public void AddItem(string filename, UnityEngine.Object obj) { AssetBundleIndexItem thisItem = new AssetBundleIndexItem(); thisItem.AddData(filename, obj); assetBundleAssets.Add(thisItem); } } [SerializeField] public string ownBundleHash; [SerializeField] public List<AssetBundleIndexList> bundlesIndex = new List<AssetBundleIndexList>(); [SerializeField] public string[] bundlesWithVariant; public AssetBundleIndex() { } public AssetBundleIndexList GetIndexItem(string assetBundleName) { for (int i = 0; i < bundlesIndex.Count; i++) { if (bundlesIndex[i].assetBundleName == assetBundleName) { return bundlesIndex[i]; } } return null; } public string GetAssetBundleEncryptedName(string assetBundleName) { for (int i = 0; i < bundlesIndex.Count; i++) { if (bundlesIndex[i].assetBundleName == assetBundleName) { if (bundlesIndex[i].encryptedName != "") return bundlesIndex[i].encryptedName; else return bundlesIndex[i].assetBundleName;//do we do that? } } return null; } #region AssetBundleManifest clone methods //These methods are replicas of the AssetBundleManifest methods so that we can just use this Index in place of the manifest public string[] GetAllAssetBundles() { return GetAllAssetBundleNames(); } public Hash128 GetAssetBundleHash(string assetBundleName) { Hash128 hash = new Hash128(); for (int i = 0; i < bundlesIndex.Count; i++) { if (bundlesIndex[i].assetBundleName == assetBundleName) { hash = Hash128.Parse(bundlesIndex[i].assetBundleHash); } } return hash; } //TODO work out what this actually is and how its made so we can recreate it server side public string[] GetAllAssetBundlesWithVariant() { return bundlesWithVariant; } public string[] GetAllDependencies(string assetBundleName) { string[] deps = new string[0]; for (int i = 0; i < bundlesIndex.Count; i++) { if (bundlesIndex[i].assetBundleName == assetBundleName) { deps = bundlesIndex[i].allDependencies; } } return deps; } public string[] GetDirectDependencies(string assetBundleName) { string[] deps = new string[0]; for (int i = 0; i < bundlesIndex.Count; i++) { if (bundlesIndex[i].assetBundleName == assetBundleName) { deps = bundlesIndex[i].directDependencies; } } return deps; } #endregion /// <summary> /// Replicates AssetDatabase.GetAllAssetBundleNames() method. Gets the names of all available asset bundles. 
/// </summary> /// <returns>String array of all available bundles.</returns> public string[] GetAllAssetBundleNames() { List<string> assetBundleNames = new List<string>(); foreach (AssetBundleIndexList iAssetList in bundlesIndex) { assetBundleNames.Add(iAssetList.assetBundleName); } return assetBundleNames.ToArray(); } /// <summary> /// Replicates AssetBundle.Contains but adds an optional type filter /// </summary> /// <param name="assetBundleName"></param> /// <param name="assetName"></param> /// <param name="type"></param> /// <returns></returns> public bool AssetBundleContains(string assetBundleName, string assetName, string type = "") { bool assetFound = false; if (GetAssetBundleIndexItem(assetBundleName, assetName, type) != null) { assetFound = true; } return assetFound; } /// <summary> /// Replicates AssetBundle.Contains but uses assetNameHash and adds an optional type filter /// </summary> /// <param name="assetBundleName"></param> /// <param name="assetName"></param> /// <param name="type"></param> /// <returns></returns> public bool AssetBundleContains(string assetBundleName, int? assetHash, string type = "") { bool assetFound = false; if (GetAssetBundleIndexItem(assetBundleName, assetHash, type) != null) { assetFound = true; } return assetFound; } /// <summary> /// Searches the available AssetBundles for the given assetName optionally filtered by type /// </summary> /// <param name="assetName"></param> /// <param name="type"></param> /// <returns></returns> public string[] FindContainingAssetBundle(string assetNameOrFilename, string type = "") { List<string> assetFoundIn = new List<string>(); for (int i = 0; i < bundlesIndex.Count; i++) { for (int ii = 0; ii < bundlesIndex[i].assetBundleAssets.Count; ii++) { if (assetNameOrFilename == bundlesIndex[i].assetBundleAssets[ii].assetName) { if (type == "" || (type != "" && (type == bundlesIndex[i].assetBundleAssets[ii].assetType || type == GetTypeWithoutAssembly(bundlesIndex[i].assetBundleAssets[ii].assetType)))) { assetFoundIn.Add(bundlesIndex[i].assetBundleName); } } } } //if we didn't find it check the filename? if (assetFoundIn.Count == 0) { for (int i = 0; i < bundlesIndex.Count; i++) { for (int ii = 0; ii < bundlesIndex[i].assetBundleAssets.Count; ii++) { if (assetNameOrFilename == bundlesIndex[i].assetBundleAssets[ii].filename) { if (type == "" || (type != "" && (type == bundlesIndex[i].assetBundleAssets[ii].assetType || type == GetTypeWithoutAssembly(bundlesIndex[i].assetBundleAssets[ii].assetType)))) { assetFoundIn.Add(bundlesIndex[i].assetBundleName); } } } } } return assetFoundIn.ToArray(); } /// <summary> /// Searches the available AssetBundles for the given assetNameHash optionally filtered by type (type may be un-necessary it depends how unique the hashes are) /// </summary> /// <param name="assetNameHash"></param> /// <param name="type"></param> /// <returns></returns> public string[] FindContainingAssetBundle(int? 
assetNameHash, string type = "") { List<string> assetFoundIn = new List<string>(); for (int i = 0; i < bundlesIndex.Count; i++) { for (int ii = 0; ii < bundlesIndex[i].assetBundleAssets.Count; ii++) { if (assetNameHash == bundlesIndex[i].assetBundleAssets[ii].assetHash) { if (type == "" || (type != "" && (type == bundlesIndex[i].assetBundleAssets[ii].assetType || type == GetTypeWithoutAssembly(bundlesIndex[i].assetBundleAssets[ii].assetType)))) { assetFoundIn.Add(bundlesIndex[i].assetBundleName); } } } } return assetFoundIn.ToArray(); } /// <summary> /// Gets all the assets of a particular type that are contained in the given asset bundle /// </summary> /// <param name="assetBundleName"></param> /// <param name="type"></param> /// <returns></returns> public string[] GetAllAssetsOfTypeInBundle(string assetBundleName, string type) { List<string> foundAssets = new List<string>(); foreach (AssetBundleIndexList iAssetList in bundlesIndex) { if (iAssetList.assetBundleName == assetBundleName) { foreach (AssetBundleIndexItem iAsset in iAssetList.assetBundleAssets) { if (type == "" || (type != "" && (type == iAsset.assetType || type == GetTypeWithoutAssembly(iAsset.assetType)))) { foundAssets.Add(iAsset.assetName); } } } } return foundAssets.ToArray(); } public AssetBundleIndexItem GetAssetBundleIndexItem(string assetBundleName, string assetNameOrFilename, string type = "") { AssetBundleIndexItem indexAsset = null; foreach (AssetBundleIndexList iAssetList in bundlesIndex) { if (indexAsset != null) break; if (iAssetList.assetBundleName == assetBundleName) { foreach (AssetBundleIndexItem iAsset in iAssetList.assetBundleAssets) { if (assetNameOrFilename == iAsset.assetName) { if (type == "" || (type != "" && (type == iAsset.assetType || type == GetTypeWithoutAssembly(iAsset.assetType)))) { indexAsset = iAsset; } } else if (assetNameOrFilename == iAsset.filename) { if (type == "" || (type != "" && (type == iAsset.assetType || type == GetTypeWithoutAssembly(iAsset.assetType)))) { indexAsset = iAsset; } } if (indexAsset != null) break; } } } return indexAsset; } public AssetBundleIndexItem GetAssetBundleIndexItem(string assetBundleName, int? assetNameHash, string type = "") { AssetBundleIndexItem indexAsset = null; foreach (AssetBundleIndexList iAssetList in bundlesIndex) { if (indexAsset != null) break; if (iAssetList.assetBundleName == assetBundleName) { foreach (AssetBundleIndexItem iAsset in iAssetList.assetBundleAssets) { if (assetNameHash == iAsset.assetHash) { if (type == "" || (type != "" && (type == iAsset.assetType || type == GetTypeWithoutAssembly(iAsset.assetType)))) { indexAsset = iAsset; } } if (indexAsset != null) break; } } } return indexAsset; } public string GetFilenameFromAssetName(string assetBundleName, string assetname, string type = "") { return GetAssetBundleIndexItem(assetBundleName, assetname, type).filename; } public string GetAssetNameFromFilename(string filename, string type = "") { string assetName = ""; string[] foundInBundles = FindContainingAssetBundle(filename, type); if (foundInBundles.Length > 0) { assetName = GetAssetBundleIndexItem(foundInBundles[0], filename, type).assetName; } return assetName; } public string GetAssetNameFromFilename(string assetBundleName, string filename, string type = "") { string assetName = ""; assetName = GetAssetBundleIndexItem(assetBundleName, filename, type).assetName; return assetName; } public string GetAssetNameFromHash(int? 
assetNameHash, string type = "") { string assetName = ""; string[] foundInBundles = FindContainingAssetBundle(assetNameHash, type); if (foundInBundles.Length > 0) { assetName = GetAssetBundleIndexItem(foundInBundles[0], assetNameHash, type).assetName; } return assetName; } public string GetAssetNameFromHash(string assetBundleName, int? assetNameHash, string type = "") { string assetName = ""; assetName = GetAssetBundleIndexItem(assetBundleName, assetNameHash, type).assetName; return assetName; } public int? GetAssetHashFromName(string assetBundleName, string assetName, string type = "") { int? assetNameHash = null; assetNameHash = (int?)GetAssetBundleIndexItem(assetBundleName, assetName, type).assetHash; return assetNameHash; } static string GetTypeWithoutAssembly(string fullType) { var typeParts = fullType.Split(new string[1] { "." }, System.StringSplitOptions.None); string typeWithoutAssembly = typeParts[typeParts.Length - 1]; return typeWithoutAssembly; } } }
// MIT License // // Copyright (c) 2009-2017 Luca Piccioni // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. // // This file is automatically generated #pragma warning disable 649, 1572, 1573 // ReSharper disable RedundantUsingDirective using System; using System.Diagnostics; using System.Runtime.InteropServices; using System.Security; using System.Text; using Khronos; // ReSharper disable CheckNamespace // ReSharper disable InconsistentNaming // ReSharper disable JoinDeclarationAndInitializer namespace OpenGL { public partial class Wgl { /// <summary> /// [WGL] wglJoinSwapGroupNV: Binding for wglJoinSwapGroupNV. /// </summary> /// <param name="hDC"> /// A <see cref="T:IntPtr"/>. /// </param> /// <param name="group"> /// A <see cref="T:uint"/>. /// </param> [RequiredByFeature("WGL_NV_swap_group")] public static bool JoinSwapGroupNV(IntPtr hDC, uint group) { bool retValue; Debug.Assert(Delegates.pwglJoinSwapGroupNV != null, "pwglJoinSwapGroupNV not implemented"); retValue = Delegates.pwglJoinSwapGroupNV(hDC, group); LogCommand("wglJoinSwapGroupNV", retValue, hDC, group ); DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [WGL] wglBindSwapBarrierNV: Binding for wglBindSwapBarrierNV. /// </summary> /// <param name="group"> /// A <see cref="T:uint"/>. /// </param> /// <param name="barrier"> /// A <see cref="T:uint"/>. /// </param> [RequiredByFeature("WGL_NV_swap_group")] public static bool BindSwapBarrierNV(uint group, uint barrier) { bool retValue; Debug.Assert(Delegates.pwglBindSwapBarrierNV != null, "pwglBindSwapBarrierNV not implemented"); retValue = Delegates.pwglBindSwapBarrierNV(group, barrier); LogCommand("wglBindSwapBarrierNV", retValue, group, barrier ); DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [WGL] wglQuerySwapGroupNV: Binding for wglQuerySwapGroupNV. /// </summary> /// <param name="hDC"> /// A <see cref="T:IntPtr"/>. /// </param> /// <param name="group"> /// A <see cref="T:uint[]"/>. /// </param> /// <param name="barrier"> /// A <see cref="T:uint[]"/>. 
/// </param> [RequiredByFeature("WGL_NV_swap_group")] public static bool QuerySwapGroupNV(IntPtr hDC, uint[] group, uint[] barrier) { bool retValue; unsafe { fixed (uint* p_group = group) fixed (uint* p_barrier = barrier) { Debug.Assert(Delegates.pwglQuerySwapGroupNV != null, "pwglQuerySwapGroupNV not implemented"); retValue = Delegates.pwglQuerySwapGroupNV(hDC, p_group, p_barrier); LogCommand("wglQuerySwapGroupNV", retValue, hDC, group, barrier ); } } DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [WGL] wglQueryMaxSwapGroupsNV: Binding for wglQueryMaxSwapGroupsNV. /// </summary> /// <param name="hDC"> /// A <see cref="T:IntPtr"/>. /// </param> /// <param name="maxGroups"> /// A <see cref="T:uint[]"/>. /// </param> /// <param name="maxBarriers"> /// A <see cref="T:uint[]"/>. /// </param> [RequiredByFeature("WGL_NV_swap_group")] public static bool QueryMaxSwapGroupsNV(IntPtr hDC, uint[] maxGroups, uint[] maxBarriers) { bool retValue; unsafe { fixed (uint* p_maxGroups = maxGroups) fixed (uint* p_maxBarriers = maxBarriers) { Debug.Assert(Delegates.pwglQueryMaxSwapGroupsNV != null, "pwglQueryMaxSwapGroupsNV not implemented"); retValue = Delegates.pwglQueryMaxSwapGroupsNV(hDC, p_maxGroups, p_maxBarriers); LogCommand("wglQueryMaxSwapGroupsNV", retValue, hDC, maxGroups, maxBarriers ); } } DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [WGL] wglQueryFrameCountNV: Binding for wglQueryFrameCountNV. /// </summary> /// <param name="hDC"> /// A <see cref="T:IntPtr"/>. /// </param> /// <param name="count"> /// A <see cref="T:uint[]"/>. /// </param> [RequiredByFeature("WGL_NV_swap_group")] public static bool QueryFrameCountNV(IntPtr hDC, uint[] count) { bool retValue; unsafe { fixed (uint* p_count = count) { Debug.Assert(Delegates.pwglQueryFrameCountNV != null, "pwglQueryFrameCountNV not implemented"); retValue = Delegates.pwglQueryFrameCountNV(hDC, p_count); LogCommand("wglQueryFrameCountNV", retValue, hDC, count ); } } DebugCheckErrors(retValue); return (retValue); } /// <summary> /// [WGL] wglResetFrameCountNV: Binding for wglResetFrameCountNV. /// </summary> /// <param name="hDC"> /// A <see cref="T:IntPtr"/>. 
/// </param> [RequiredByFeature("WGL_NV_swap_group")] public static bool ResetFrameCountNV(IntPtr hDC) { bool retValue; Debug.Assert(Delegates.pwglResetFrameCountNV != null, "pwglResetFrameCountNV not implemented"); retValue = Delegates.pwglResetFrameCountNV(hDC); LogCommand("wglResetFrameCountNV", retValue, hDC ); DebugCheckErrors(retValue); return (retValue); } internal static unsafe partial class Delegates { [RequiredByFeature("WGL_NV_swap_group")] [SuppressUnmanagedCodeSecurity] internal delegate bool wglJoinSwapGroupNV(IntPtr hDC, uint group); [RequiredByFeature("WGL_NV_swap_group")] internal static wglJoinSwapGroupNV pwglJoinSwapGroupNV; [RequiredByFeature("WGL_NV_swap_group")] [SuppressUnmanagedCodeSecurity] internal delegate bool wglBindSwapBarrierNV(uint group, uint barrier); [RequiredByFeature("WGL_NV_swap_group")] internal static wglBindSwapBarrierNV pwglBindSwapBarrierNV; [RequiredByFeature("WGL_NV_swap_group")] [SuppressUnmanagedCodeSecurity] internal delegate bool wglQuerySwapGroupNV(IntPtr hDC, uint* group, uint* barrier); [RequiredByFeature("WGL_NV_swap_group")] internal static wglQuerySwapGroupNV pwglQuerySwapGroupNV; [RequiredByFeature("WGL_NV_swap_group")] [SuppressUnmanagedCodeSecurity] internal delegate bool wglQueryMaxSwapGroupsNV(IntPtr hDC, uint* maxGroups, uint* maxBarriers); [RequiredByFeature("WGL_NV_swap_group")] internal static wglQueryMaxSwapGroupsNV pwglQueryMaxSwapGroupsNV; [RequiredByFeature("WGL_NV_swap_group")] [SuppressUnmanagedCodeSecurity] internal delegate bool wglQueryFrameCountNV(IntPtr hDC, uint* count); [RequiredByFeature("WGL_NV_swap_group")] internal static wglQueryFrameCountNV pwglQueryFrameCountNV; [RequiredByFeature("WGL_NV_swap_group")] [SuppressUnmanagedCodeSecurity] internal delegate bool wglResetFrameCountNV(IntPtr hDC); [RequiredByFeature("WGL_NV_swap_group")] internal static wglResetFrameCountNV pwglResetFrameCountNV; } } }
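// Usage sketch for the WGL_NV_swap_group wrappers above. 'hDC' must be a valid Windows
// device context for the GL window, obtained elsewhere; availability of the extension is
// assumed to have been checked by the caller. Group and barrier number 1 are illustrative.
using System;
using OpenGL;

internal static class SwapGroupSketch
{
    public static void JoinFirstGroup(IntPtr hDC)
    {
        uint[] maxGroups = new uint[1];
        uint[] maxBarriers = new uint[1];
        if (!Wgl.QueryMaxSwapGroupsNV(hDC, maxGroups, maxBarriers) || maxGroups[0] == 0)
            return; // driver exposes no swap groups on this device context

        // Join swap group 1 and, where a barrier is available, bind it so all
        // group members present frames in lock-step.
        if (Wgl.JoinSwapGroupNV(hDC, 1) && maxBarriers[0] > 0)
            Wgl.BindSwapBarrierNV(1, 1);
    }
}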
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for details. using System; using System.Collections; using System.ComponentModel; using System.Diagnostics; using System.Drawing; using System.Data; using System.Globalization; using System.Windows.Forms; using OpenLiveWriter.CoreServices; using OpenLiveWriter.Localization; namespace OpenLiveWriter.InternalWriterPlugin.Controls { internal class MapBirdsEyeDirectionControl : System.Windows.Forms.UserControl { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.Container components = null; private Bitmap _backgroundBitmap; private const int BUTTON_INSET = 6; private class DirectionalButton : MapBitmapButton { public DirectionalButton(VEOrientation direction, string directionName) : base(String.Format(CultureInfo.InvariantCulture, "BEV{0}", directionName)) { _direction = direction; } public VEOrientation Direction { get { return _direction; } } private VEOrientation _direction; } private class NorthButton : DirectionalButton { public NorthButton() : base(VEOrientation.North, "North") { } } private class EastButton : DirectionalButton { public EastButton() : base(VEOrientation.East, "East") { } } private class SouthButton : DirectionalButton { public SouthButton() : base(VEOrientation.South, "South") { } } private class WestButton : DirectionalButton { public WestButton() : base(VEOrientation.West, "West") { } } private NorthButton _northButton = new NorthButton(); private SouthButton _southButton = new SouthButton(); private EastButton _eastButton = new EastButton(); private WestButton _westButton = new WestButton(); private Bitmap _centerArrowBitmap; public MapBirdsEyeDirectionControl() { // Turn on double buffered painting. SetStyle(ControlStyles.UserPaint, true); SetStyle(ControlStyles.DoubleBuffer, true); SetStyle(ControlStyles.AllPaintingInWmPaint, true); // This call is required by the Windows.Forms Form Designer. 
InitializeComponent(); //set accessible names _northButton.AccessibleName = Res.Get(StringId.MapDirectionNorth); _eastButton.AccessibleName = Res.Get(StringId.MapDirectionEast); _southButton.AccessibleName = Res.Get(StringId.MapDirectionSouth); _westButton.AccessibleName = Res.Get(StringId.MapDirectionWest); _arrowBitmaps[0] = ResourceHelper.LoadAssemblyResourceBitmap("Images.PointNorth.png"); _arrowBitmaps[1] = ResourceHelper.LoadAssemblyResourceBitmap("Images.PointEast.png"); _arrowBitmaps[2] = ResourceHelper.LoadAssemblyResourceBitmap("Images.PointSouth.png"); _arrowBitmaps[3] = ResourceHelper.LoadAssemblyResourceBitmap("Images.PointWest.png"); // initialize the background bitmap _backgroundBitmap = ResourceHelper.LoadAssemblyResourceBitmap("Images.BEVNSEWFrame.png"); Width = _backgroundBitmap.Width + (BUTTON_INSET * 2); Height = _backgroundBitmap.Height + (BUTTON_INSET * 2); _buttons[0] = _northButton; _buttons[1] = _eastButton; _buttons[2] = _southButton; _buttons[3] = _westButton; RecalcPositions(); // add the directional buttons and subscribe to their click event Controls.Add(_northButton); _northButton.Click += new EventHandler(_directionalButton_Click); Controls.Add(_eastButton); _eastButton.Click += new EventHandler(_directionalButton_Click); Controls.Add(_southButton); _southButton.Click += new EventHandler(_directionalButton_Click); Controls.Add(_westButton); _westButton.Click += new EventHandler(_directionalButton_Click); // handle mouse events that control 'hot-tracking' this.MouseLeave += new EventHandler(arrowHotTrack_MouseLeave); _northButton.MouseLeave += new EventHandler(arrowHotTrack_MouseLeave); _northButton.MouseEnter += new EventHandler(arrowHotTrack_MouseEnter); _eastButton.MouseLeave += new EventHandler(arrowHotTrack_MouseLeave); _eastButton.MouseEnter += new EventHandler(arrowHotTrack_MouseEnter); _southButton.MouseLeave += new EventHandler(arrowHotTrack_MouseLeave); _southButton.MouseEnter += new EventHandler(arrowHotTrack_MouseEnter); _westButton.MouseLeave += new EventHandler(arrowHotTrack_MouseLeave); _westButton.MouseEnter += new EventHandler(arrowHotTrack_MouseEnter); // default to North Direction = VEOrientation.North; } private void RecalcPositions() { int centerX = (Width / 2) - (_northButton.Width / 2); int centerY = (Height / 2) - (_eastButton.Height / 2); _buttonLocations[0] = new Point(centerX, 0); _buttonLocations[1] = new Point(Width - _eastButton.Width, centerY); _buttonLocations[2] = new Point(centerX, Height - _southButton.Height); _buttonLocations[3] = new Point(0, centerY); } private Point[] _buttonLocations = new Point[4]; private DirectionalButton[] _buttons = new DirectionalButton[4]; private Bitmap[] _arrowBitmaps = new Bitmap[4]; private void PositionButtons(int iTopButton) { int iLocation = 0; for (int i = iTopButton; i < _buttons.Length; i++) _buttons[i].Location = _buttonLocations[iLocation++]; for (int i = 0; i < iTopButton; i++) _buttons[i].Location = _buttonLocations[iLocation++]; } [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)] public VEOrientation Direction { get { return _mapDirection; } set { _mapDirection = value; RepositionButtons(); if (DirectionChanged != null) DirectionChanged(this, EventArgs.Empty); } } private void RepositionButtons() { // flip the buttons around switch (_mapDirection) { case VEOrientation.North: PositionButtons(0); break; case VEOrientation.East: PositionButtons(1); break; case VEOrientation.South: PositionButtons(2); break; case VEOrientation.West: PositionButtons(3); break; } 
// always go back to north facing button CenterArrowBitmap = _arrowBitmaps[0]; Invalidate(); } private VEOrientation _mapDirection; public event EventHandler DirectionChanged; protected override void SetBoundsCore(int x, int y, int width, int height, BoundsSpecified specified) { base.SetBoundsCore(x, y, ScaleX(_backgroundBitmap.Width + (BUTTON_INSET * 2)), ScaleY(_backgroundBitmap.Height + (BUTTON_INSET * 2)), specified); } protected override void OnPaint(PaintEventArgs e) { base.OnPaint(e); e.Graphics.DrawImage(_backgroundBitmap, ScaleX(BUTTON_INSET), ScaleY(BUTTON_INSET), ScaleX(_backgroundBitmap.Width), ScaleY(_backgroundBitmap.Height)); if (CenterArrowBitmap != null) { e.Graphics.DrawImage( CenterArrowBitmap, (Width / 2) - (ScaleX(CenterArrowBitmap.Width) / 2), (Height / 2) - (ScaleY(CenterArrowBitmap.Height) / 2), ScaleX(CenterArrowBitmap.Width), ScaleY(CenterArrowBitmap.Height)); } } private Bitmap CenterArrowBitmap { get { return _centerArrowBitmap; } set { _centerArrowBitmap = value; Invalidate(); } } private void _directionalButton_Click(object sender, EventArgs e) { _supressNextEnter = true; Direction = (sender as DirectionalButton).Direction; } private void arrowHotTrack_MouseEnter(object sender, EventArgs e) { if (!_supressNextEnter) { DirectionalButton button = sender as DirectionalButton; for (int i = 0; i < _buttonLocations.Length; i++) { if (button.Location == _buttonLocations[i]) { CenterArrowBitmap = _arrowBitmaps[i]; break; } } } else { _supressNextEnter = false; } } private void arrowHotTrack_MouseLeave(object sender, EventArgs e) { CenterArrowBitmap = _arrowBitmaps[0]; } private bool _supressNextEnter = false; /// <summary> /// Clean up any resources being used. /// </summary> protected override void Dispose(bool disposing) { if (disposing) { if (components != null) { components.Dispose(); } } base.Dispose(disposing); } #region Component Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { components = new System.ComponentModel.Container(); this.RightToLeft = RightToLeft.No; } #endregion #region High DPI Scaling protected override void ScaleControl(SizeF factor, BoundsSpecified specified) { SaveScaleState(factor.Width, factor.Height); base.ScaleControl(factor, specified); RecalcPositions(); RepositionButtons(); } protected override void ScaleCore(float dx, float dy) { SaveScaleState(dx, dy); base.ScaleCore(dx, dy); RecalcPositions(); RepositionButtons(); } protected override bool ScaleChildren { get { return false; } } private void SaveScaleState(float dx, float dy) { scale = new PointF(scale.X * dx, scale.Y * dy); } private PointF scale = new PointF(1f, 1f); protected int ScaleX(int x) { return (int)(x * scale.X); } protected int ScaleY(int y) { return (int)(y * scale.Y); } protected Point ScaleLocation(Point p, float dx, float dy) { p.X = (int)(p.X * dx); p.Y = (int)(p.Y * dy); return p; } #endregion } }
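// The PositionButtons/RepositionButtons logic above rotates the four directional
// buttons around the compass frame: the button whose direction currently faces
// "up" goes into slot 0 (top) and the rest follow clockwise, so button i lands in
// slot (i - iTopButton + 4) % 4. The standalone sketch below restates that rotation
// outside of WinForms; the class and method names are illustrative only and are
// not part of the control's API.
internal static class CompassRingSketch
{
    // Slots are ordered clockwise: 0 = top, 1 = right, 2 = bottom, 3 = left.
    // Directions are ordered North = 0, East = 1, South = 2, West = 3, matching
    // the _buttons/_buttonLocations arrays above.
    public static int[] SlotsFor(int topDirection)
    {
        int[] slots = new int[4];
        for (int dir = 0; dir < 4; dir++)
            slots[dir] = (dir - topDirection + 4) % 4;
        return slots;
    }
}
// Example: SlotsFor(1) (East facing up) yields { 3, 0, 1, 2 }, i.e. East on top,
// South on the right, West on the bottom and North on the left, which is the same
// assignment PositionButtons(1) produces via _buttonLocations.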
/************************************************************************************ Copyright : Copyright 2014 Oculus VR, LLC. All Rights reserved. Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License"); you may not use the Oculus VR Rift SDK except in compliance with the License, which is provided at the time of installation or download, or which otherwise accompanies this software in either electronic or hard copy form. You may obtain a copy of the License at http://www.oculusvr.com/licenses/LICENSE-3.2 Unless required by applicable law or agreed to in writing, the Oculus VR SDK distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ************************************************************************************/ using UnityEngine; using System.Collections.Generic; /// <summary> /// Controls the player's movement in virtual reality. /// </summary> [RequireComponent(typeof(CharacterController))] public class OVRPlayerController : MonoBehaviour { /// <summary> /// The rate acceleration during movement. /// </summary> public float Acceleration = 0.1f; /// <summary> /// The rate of damping on movement. /// </summary> public float Damping = 0.3f; /// <summary> /// The rate of additional damping when moving sideways or backwards. /// </summary> public float BackAndSideDampen = 0.5f; /// <summary> /// The force applied to the character when jumping. /// </summary> public float JumpForce = 0.3f; /// <summary> /// The rate of rotation when using a gamepad. /// </summary> public float RotationAmount = 1.5f; /// <summary> /// The rate of rotation when using the keyboard. /// </summary> public float RotationRatchet = 45.0f; /// <summary> /// The player's current rotation about the Y axis. /// </summary> private float YRotation = 0.0f; /// <summary> /// If true, tracking data from a child OVRCameraRig will update the direction of movement. /// </summary> public bool HmdRotatesY = true; /// <summary> /// Modifies the strength of gravity. /// </summary> public float GravityModifier = 0.379f; private float MoveScale = 1.0f; private Vector3 MoveThrottle = Vector3.zero; private float FallSpeed = 0.0f; private OVRPose? InitialPose; /// <summary> /// If true, each OVRPlayerController will use the player's physical height. 
/// </summary> public bool useProfileHeight = true; protected CharacterController Controller = null; protected OVRCameraRig CameraController = null; private float MoveScaleMultiplier = 1.0f; private float RotationScaleMultiplier = 1.0f; private bool SkipMouseRotation = false; private bool HaltUpdateMovement = false; private bool prevHatLeft = false; private bool prevHatRight = false; private float SimulationRate = 60f; void Awake() { Controller = gameObject.GetComponent<CharacterController>(); if(Controller == null) Debug.LogWarning("OVRPlayerController: No CharacterController attached."); // We use OVRCameraRig to set rotations to cameras, // and to be influenced by rotation OVRCameraRig[] CameraControllers; CameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>(); if(CameraControllers.Length == 0) Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached."); else if (CameraControllers.Length > 1) Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached."); else CameraController = CameraControllers[0]; YRotation = transform.rotation.eulerAngles.y; #if UNITY_ANDROID && !UNITY_EDITOR OVRManager.display.RecenteredPose += ResetOrientation; #endif } protected virtual void Update() { if (useProfileHeight) { if (InitialPose == null) { InitialPose = new OVRPose() { position = CameraController.transform.localPosition, orientation = CameraController.transform.localRotation }; } var p = CameraController.transform.localPosition; p.y = OVRManager.profile.eyeHeight - 0.5f * Controller.height; p.z = OVRManager.profile.eyeDepth; CameraController.transform.localPosition = p; } else if (InitialPose != null) { CameraController.transform.localPosition = InitialPose.Value.position; CameraController.transform.localRotation = InitialPose.Value.orientation; InitialPose = null; } UpdateMovement(); Vector3 moveDirection = Vector3.zero; float motorDamp = (1.0f + (Damping * SimulationRate * Time.deltaTime)); MoveThrottle.x /= motorDamp; MoveThrottle.y = (MoveThrottle.y > 0.0f) ? 
(MoveThrottle.y / motorDamp) : MoveThrottle.y; MoveThrottle.z /= motorDamp; moveDirection += MoveThrottle * SimulationRate * Time.deltaTime; // Gravity if (Controller.isGrounded && FallSpeed <= 0) FallSpeed = ((Physics.gravity.y * (GravityModifier * 0.002f))); else FallSpeed += ((Physics.gravity.y * (GravityModifier * 0.002f)) * SimulationRate * Time.deltaTime); moveDirection.y += FallSpeed * SimulationRate * Time.deltaTime; // Offset correction for uneven ground float bumpUpOffset = 0.0f; if (Controller.isGrounded && MoveThrottle.y <= 0.001f) { bumpUpOffset = Mathf.Max(Controller.stepOffset, new Vector3(moveDirection.x, 0, moveDirection.z).magnitude); moveDirection -= bumpUpOffset * Vector3.up; } Vector3 predictedXZ = Vector3.Scale((Controller.transform.localPosition + moveDirection), new Vector3(1, 0, 1)); // Move controller Controller.Move(moveDirection); Vector3 actualXZ = Vector3.Scale(Controller.transform.localPosition, new Vector3(1, 0, 1)); if (predictedXZ != actualXZ) MoveThrottle += (actualXZ - predictedXZ) / (SimulationRate * Time.deltaTime); } public virtual void UpdateMovement() { if (HaltUpdateMovement) return; bool moveForward = Input.GetKey(KeyCode.W) || Input.GetKey(KeyCode.UpArrow); bool moveLeft = Input.GetKey(KeyCode.A) || Input.GetKey(KeyCode.LeftArrow); bool moveRight = Input.GetKey(KeyCode.D) || Input.GetKey(KeyCode.RightArrow); bool moveBack = Input.GetKey(KeyCode.S) || Input.GetKey(KeyCode.DownArrow); if (Input.GetKeyDown(KeyCode.Space)) Jump(); bool dpad_move = false; if (OVRGamepadController.GPC_GetButton(OVRGamepadController.Button.Up)) { moveForward = true; dpad_move = true; } if (OVRGamepadController.GPC_GetButton(OVRGamepadController.Button.Down)) { moveBack = true; dpad_move = true; } MoveScale = 1.0f; if ( (moveForward && moveLeft) || (moveForward && moveRight) || (moveBack && moveLeft) || (moveBack && moveRight) ) MoveScale = 0.70710678f; // No positional movement if we are in the air if (!Controller.isGrounded) MoveScale = 0.0f; MoveScale *= SimulationRate * Time.deltaTime; // Compute this for key movement float moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier; // Run! if (dpad_move || Input.GetKey(KeyCode.LeftShift) || Input.GetKey(KeyCode.RightShift)) moveInfluence *= 3.0f; Quaternion ort = (HmdRotatesY) ?
CameraController.centerEyeAnchor.rotation : transform.rotation; Vector3 ortEuler = ort.eulerAngles; ortEuler.z = ortEuler.x = 0f; ort = Quaternion.Euler(ortEuler); if (moveForward) MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * Vector3.forward); if (moveBack) MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * BackAndSideDampen * Vector3.back); if (moveLeft) MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.left); if (moveRight) MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.right); bool curHatLeft = OVRGamepadController.GPC_GetButton(OVRGamepadController.Button.LeftShoulder); Vector3 euler = transform.rotation.eulerAngles; if (curHatLeft && !prevHatLeft) euler.y -= RotationRatchet; prevHatLeft = curHatLeft; bool curHatRight = OVRGamepadController.GPC_GetButton(OVRGamepadController.Button.RightShoulder); if(curHatRight && !prevHatRight) euler.y += RotationRatchet; prevHatRight = curHatRight; //Use keys to ratchet rotation if (Input.GetKeyDown(KeyCode.Q)) euler.y -= RotationRatchet; if (Input.GetKeyDown(KeyCode.E)) euler.y += RotationRatchet; float rotateInfluence = SimulationRate * Time.deltaTime * RotationAmount * RotationScaleMultiplier; if (!SkipMouseRotation) euler.y += Input.GetAxis("Mouse X") * rotateInfluence * 3.25f; moveInfluence = SimulationRate * Time.deltaTime * Acceleration * 0.1f * MoveScale * MoveScaleMultiplier; #if !UNITY_ANDROID // LeftTrigger not avail on Android game pad moveInfluence *= 1.0f + OVRGamepadController.GPC_GetAxis(OVRGamepadController.Axis.LeftTrigger); #endif float leftAxisX = OVRGamepadController.GPC_GetAxis(OVRGamepadController.Axis.LeftXAxis); float leftAxisY = OVRGamepadController.GPC_GetAxis(OVRGamepadController.Axis.LeftYAxis); if(leftAxisY > 0.0f) MoveThrottle += ort * (leftAxisY * moveInfluence * Vector3.forward); if(leftAxisY < 0.0f) MoveThrottle += ort * (Mathf.Abs(leftAxisY) * moveInfluence * BackAndSideDampen * Vector3.back); if(leftAxisX < 0.0f) MoveThrottle += ort * (Mathf.Abs(leftAxisX) * moveInfluence * BackAndSideDampen * Vector3.left); if(leftAxisX > 0.0f) MoveThrottle += ort * (leftAxisX * moveInfluence * BackAndSideDampen * Vector3.right); float rightAxisX = OVRGamepadController.GPC_GetAxis(OVRGamepadController.Axis.RightXAxis); euler.y += rightAxisX * rotateInfluence; transform.rotation = Quaternion.Euler(euler); } /// <summary> /// Jump! Must be enabled manually. /// </summary> public bool Jump() { if (!Controller.isGrounded) return false; MoveThrottle += new Vector3(0, JumpForce, 0); return true; } /// <summary> /// Stop this instance. /// </summary> public void Stop() { Controller.Move(Vector3.zero); MoveThrottle = Vector3.zero; FallSpeed = 0.0f; } /// <summary> /// Gets the move scale multiplier. /// </summary> /// <param name="moveScaleMultiplier">Move scale multiplier.</param> public void GetMoveScaleMultiplier(ref float moveScaleMultiplier) { moveScaleMultiplier = MoveScaleMultiplier; } /// <summary> /// Sets the move scale multiplier. /// </summary> /// <param name="moveScaleMultiplier">Move scale multiplier.</param> public void SetMoveScaleMultiplier(float moveScaleMultiplier) { MoveScaleMultiplier = moveScaleMultiplier; } /// <summary> /// Gets the rotation scale multiplier. 
/// </summary> /// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param> public void GetRotationScaleMultiplier(ref float rotationScaleMultiplier) { rotationScaleMultiplier = RotationScaleMultiplier; } /// <summary> /// Sets the rotation scale multiplier. /// </summary> /// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param> public void SetRotationScaleMultiplier(float rotationScaleMultiplier) { RotationScaleMultiplier = rotationScaleMultiplier; } /// <summary> /// Gets the allow mouse rotation. /// </summary> /// <param name="skipMouseRotation">Allow mouse rotation.</param> public void GetSkipMouseRotation(ref bool skipMouseRotation) { skipMouseRotation = SkipMouseRotation; } /// <summary> /// Sets the allow mouse rotation. /// </summary> /// <param name="skipMouseRotation">If set to <c>true</c> allow mouse rotation.</param> public void SetSkipMouseRotation(bool skipMouseRotation) { SkipMouseRotation = skipMouseRotation; } /// <summary> /// Gets the halt update movement. /// </summary> /// <param name="haltUpdateMovement">Halt update movement.</param> public void GetHaltUpdateMovement(ref bool haltUpdateMovement) { haltUpdateMovement = HaltUpdateMovement; } /// <summary> /// Sets the halt update movement. /// </summary> /// <param name="haltUpdateMovement">If set to <c>true</c> halt update movement.</param> public void SetHaltUpdateMovement(bool haltUpdateMovement) { HaltUpdateMovement = haltUpdateMovement; } /// <summary> /// Resets the player look rotation when the device orientation is reset. /// </summary> public void ResetOrientation() { Vector3 euler = transform.rotation.eulerAngles; euler.y = YRotation; transform.rotation = Quaternion.Euler(euler); } }
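// OVRPlayerController.Update() above damps MoveThrottle each frame by dividing it
// by (1 + Damping * SimulationRate * Time.deltaTime), an approximation of
// exponential decay that stays roughly frame-rate independent because the decay
// per frame scales with the elapsed time. The helper below isolates that single
// step so it can be inspected in isolation; it is an illustrative sketch and not
// part of the Oculus SDK.
public static class MoveDampingSketch
{
    // Applies one frame of the throttle damping used in OVRPlayerController.Update().
    public static UnityEngine.Vector3 Damp(UnityEngine.Vector3 throttle,
        float damping, float simulationRate, float deltaTime)
    {
        float motorDamp = 1.0f + (damping * simulationRate * deltaTime);
        throttle.x /= motorDamp;
        // Only upward throttle (e.g. from Jump()) is damped; falling is handled by gravity.
        throttle.y = (throttle.y > 0.0f) ? throttle.y / motorDamp : throttle.y;
        throttle.z /= motorDamp;
        return throttle;
    }
}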
using System; using System.Collections; using System.ComponentModel; using System.Drawing; using System.Data; using System.Windows.Forms; using EarLab.ReaderWriters; namespace EarLab.Viewers.Layouts { /// <summary> /// Summary description for LayoutScatter. /// </summary> public class LayoutScatter : System.Windows.Forms.UserControl { public struct BoxDataType { public int cellIDStart; public int cellIDEnd; public double timeStart; public double timeEnd; public ReaderWriterBinarySpikes.SpikeItem[] spikeArray; } /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.Container components = null; private EarLab.Viewers.Panels.Panel2DColor viewerPanel; private EarLab.Viewers.Panels.PanelAxisNew axisPanel; private System.Windows.Forms.Panel backgroundPanel; private ReaderWriterBinarySpikes.SpikeItem[] spikesArray; private double offsetTime, pixelTime; private int cellCount; private Color scatterColor = Color.Black; private Bitmap viewerBitmap; public LayoutScatter() { // This call is required by the Windows.Forms Form Designer. InitializeComponent(); // we set the axes controls to know what controls they are hosting this.axisPanel.TopAxisShow = true; this.axisPanel.TopAxisControl = this.viewerPanel; this.axisPanel.LeftAxisShow = true; this.axisPanel.LeftAxisControl = this.viewerPanel; this.axisPanel.BottomAxisShow = true; this.axisPanel.BottomAxisControl = this.viewerPanel; //this.viewerBitmap = new Bitmap(1, 1, System.Drawing.Imaging.PixelFormat.Format24bppRgb); //this.bitmapGraphics = Graphics.FromImage(this.viewerBitmap); //this.bitmapGraphics.Clear(System.Drawing.SystemColors.Control); } /// <summary> /// Clean up any resources being used. /// </summary> protected override void Dispose( bool disposing ) { if( disposing ) { if(components != null) { components.Dispose(); } } base.Dispose( disposing ); } #region Component Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. 
/// </summary> private void InitializeComponent() { this.backgroundPanel = new System.Windows.Forms.Panel(); this.axisPanel = new EarLab.Viewers.Panels.PanelAxisNew(); this.viewerPanel = new EarLab.Viewers.Panels.Panel2DColor(); this.backgroundPanel.SuspendLayout(); this.axisPanel.SuspendLayout(); this.SuspendLayout(); // // backgroundPanel // this.backgroundPanel.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.backgroundPanel.BackColor = System.Drawing.Color.White; this.backgroundPanel.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle; this.backgroundPanel.Controls.Add(this.axisPanel); this.backgroundPanel.Location = new System.Drawing.Point(0, 0); this.backgroundPanel.Name = "backgroundPanel"; this.backgroundPanel.Size = new System.Drawing.Size(640, 216); this.backgroundPanel.TabIndex = 0; // // axisPanel // this.axisPanel.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.axisPanel.BottomAxisEndValue = 0F; this.axisPanel.BottomAxisLabel = "Axis Label Not Set"; this.axisPanel.BottomAxisLabelShow = true; this.axisPanel.BottomAxisMajorTickHeight = 3; this.axisPanel.BottomAxisMajorTickNumbersFormat = "0"; this.axisPanel.BottomAxisMajorTickNumbersShow = true; this.axisPanel.BottomAxisMajorTickNumbersSpacing = 10; this.axisPanel.BottomAxisMajorTickOffset = 2; this.axisPanel.BottomAxisShow = false; this.axisPanel.BottomAxisStartValue = 0F; this.axisPanel.Controls.Add(this.viewerPanel); this.axisPanel.Font = new System.Drawing.Font("Arial", 10F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Pixel, ((System.Byte)(0))); this.axisPanel.LeftAxisEndValue = 0F; this.axisPanel.LeftAxisLabel = "Axis Label Not Set"; this.axisPanel.LeftAxisLabelShow = true; this.axisPanel.LeftAxisMajorTickHeight = 3; this.axisPanel.LeftAxisMajorTickNumbersFormat = "0"; this.axisPanel.LeftAxisMajorTickNumbersShow = true; this.axisPanel.LeftAxisMajorTickNumbersSpacing = 10; this.axisPanel.LeftAxisMajorTickOffset = 2; this.axisPanel.LeftAxisShow = false; this.axisPanel.LeftAxisStartValue = 0F; this.axisPanel.Location = new System.Drawing.Point(0, 0); this.axisPanel.Name = "axisPanel"; this.axisPanel.RightAxisEndValue = 0F; this.axisPanel.RightAxisLabel = "Axis Label Not Set"; this.axisPanel.RightAxisLabelShow = true; this.axisPanel.RightAxisMajorTickHeight = 3; this.axisPanel.RightAxisMajorTickNumbersFormat = "0"; this.axisPanel.RightAxisMajorTickNumbersShow = true; this.axisPanel.RightAxisMajorTickNumbersSpacing = 10; this.axisPanel.RightAxisMajorTickOffset = 2; this.axisPanel.RightAxisShow = false; this.axisPanel.RightAxisStartValue = 0F; this.axisPanel.Size = new System.Drawing.Size(638, 214); this.axisPanel.TabIndex = 0; this.axisPanel.TopAxisEndValue = 0F; this.axisPanel.TopAxisLabel = "Axis Label Not Set"; this.axisPanel.TopAxisLabelShow = true; this.axisPanel.TopAxisMajorTickHeight = 3; this.axisPanel.TopAxisMajorTickNumbersFormat = "0"; this.axisPanel.TopAxisMajorTickNumbersShow = true; this.axisPanel.TopAxisMajorTickNumbersSpacing = 10; this.axisPanel.TopAxisMajorTickOffset = 2; this.axisPanel.TopAxisShow = false; this.axisPanel.TopAxisStartValue = 0F; // // viewerPanel // this.viewerPanel.Anchor = 
((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.viewerPanel.BackColor = System.Drawing.SystemColors.Control; this.viewerPanel.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle; this.viewerPanel.Location = new System.Drawing.Point(48, 24); this.viewerPanel.Name = "viewerPanel"; this.viewerPanel.Size = new System.Drawing.Size(544, 160); this.viewerPanel.TabIndex = 0; // // LayoutScatter // this.Controls.Add(this.backgroundPanel); this.Name = "LayoutScatter"; this.Size = new System.Drawing.Size(640, 216); this.backgroundPanel.ResumeLayout(false); this.axisPanel.ResumeLayout(false); this.ResumeLayout(false); } #endregion #region Methods public bool View(ReaderWriterBinarySpikes.SpikeItem[] spikesArray, double offsetTime, double pixelTime, int cellCount) { this.spikesArray = spikesArray; this.offsetTime = offsetTime; this.pixelTime = pixelTime; this.cellCount = cellCount; this.axisPanel.BottomAxisStartValue = (float)this.offsetTime; this.axisPanel.BottomAxisEndValue = (float)(this.offsetTime + this.viewerPanel.ClientSize.Width*this.pixelTime); this.axisPanel.Invalidate(); this.CreateBitmap(); this.ViewerRefresh(); return true; } public void SetupAxis(string leftAxisName, string leftAxisFormat, float leftAxisMin, float leftAxisMax, string bottomAxisName, string bottomAxisFormat) { this.axisPanel.LeftAxisLabel = leftAxisName; this.axisPanel.LeftAxisMajorTickNumbersFormat = leftAxisFormat; this.axisPanel.LeftAxisStartValue = leftAxisMin; this.axisPanel.LeftAxisEndValue = leftAxisMax; this.axisPanel.BottomAxisLabel = bottomAxisName; this.axisPanel.BottomAxisMajorTickNumbersFormat = bottomAxisFormat; } private void CreateBitmap() { if (this.viewerBitmap == null || this.viewerBitmap.Width != this.viewerPanel.ClientSize.Width || this.viewerBitmap.Height != this.viewerPanel.ClientSize.Height) { this.viewerBitmap = new Bitmap(this.viewerPanel.ClientSize.Width, this.viewerPanel.ClientSize.Height, System.Drawing.Imaging.PixelFormat.Format24bppRgb); //this.bitmapGraphics = Graphics.FromImage(this.viewerBitmap); } Graphics.FromImage(this.viewerBitmap).Clear(System.Drawing.SystemColors.Control); //this.bitmapGraphics.FillRectangle(new SolidBrush(SystemColors.Control), 0, 0, this.viewerBitmap.Width-1, this.viewerBitmap.Height-1); int x, y; foreach (ReaderWriterBinarySpikes.SpikeItem item in this.spikesArray) { x = (int)Math.Max(0, Math.Min(((item.time-this.offsetTime)/this.pixelTime)-1, this.viewerPanel.ClientSize.Width-1)); y = (int)Math.Max(0, Math.Min((this.viewerPanel.ClientSize.Height-1-this.viewerPanel.ClientSize.Height*(item.cellID+1)/this.cellCount), this.viewerPanel.ClientSize.Height-1)); this.viewerBitmap.SetPixel(x, y, this.scatterColor); } //this.viewerBitmap.RotateFlip(RotateFlipType.RotateNoneFlipY); } public int CellID(int yCoordinate) { return this.cellCount-1-(int)Math.Round((yCoordinate/(double)this.viewerPanel.Height)*this.cellCount, 0); } public double Time(int xCoordinate) { return this.offsetTime+xCoordinate*this.pixelTime; } private void ViewerRefresh() { this.viewerPanel.Bitmap = this.viewerBitmap; this.viewerPanel.Invalidate(); } #endregion #region Properties public Rectangle Box { set { this.viewerPanel.Box = value; } } public BoxDataType BoxData { get { BoxDataType boxData = new BoxDataType(); boxData.cellIDStart = this.CellID(this.viewerPanel.Box.Bottom); boxData.cellIDEnd = 
this.CellID(this.viewerPanel.Box.Top); boxData.timeStart = this.Time(this.viewerPanel.Box.Left); boxData.timeEnd = this.Time(this.viewerPanel.Box.Right); boxData.spikeArray = this.spikesArray; return boxData; } } public EarLab.Viewers.Panels.Panel2DColor ViewerPanel { get { return this.viewerPanel; } } public EarLab.Viewers.Panels.PanelAxisNew AxisPanel { get { return this.axisPanel; } } #endregion } }
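// LayoutScatter maps each spike to a pixel in CreateBitmap(): x comes from the
// spike time relative to offsetTime at pixelTime seconds per pixel, and y places
// cell 0 at the bottom of the panel and cell (cellCount - 1) at the top; CellID()
// and Time() invert that mapping for the selection box. The sketch below restates
// the forward mapping as plain arithmetic; the class and parameter names are
// illustrative only and not part of the viewer.
internal static class ScatterMappingSketch
{
    // Forward mapping used by CreateBitmap(), clamped to the panel bounds.
    public static System.Drawing.Point SpikeToPixel(double time, int cellID,
        double offsetTime, double pixelTime, int cellCount, int panelWidth, int panelHeight)
    {
        int x = (int)System.Math.Max(0, System.Math.Min(((time - offsetTime) / pixelTime) - 1, panelWidth - 1));
        int y = System.Math.Max(0, System.Math.Min(panelHeight - 1 - panelHeight * (cellID + 1) / cellCount, panelHeight - 1));
        return new System.Drawing.Point(x, y);
    }
}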
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Apache.Ignite.Core { using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.IO; using System.Linq; using System.Reflection; using System.Runtime; using System.Threading; using Apache.Ignite.Core.Binary; using Apache.Ignite.Core.Common; using Apache.Ignite.Core.Impl; using Apache.Ignite.Core.Impl.Binary; using Apache.Ignite.Core.Impl.Binary.IO; using Apache.Ignite.Core.Impl.Common; using Apache.Ignite.Core.Impl.Handle; using Apache.Ignite.Core.Impl.Memory; using Apache.Ignite.Core.Impl.Unmanaged; using Apache.Ignite.Core.Lifecycle; using BinaryReader = Apache.Ignite.Core.Impl.Binary.BinaryReader; using UU = Apache.Ignite.Core.Impl.Unmanaged.UnmanagedUtils; /// <summary> /// This class defines a factory for the main Ignite API. /// <p/> /// Use <see cref="Ignition.Start()"/> method to start Ignite with default configuration. /// <para/> /// All members are thread-safe and may be used concurrently from multiple threads. /// </summary> public static class Ignition { /** */ internal const string EnvIgniteSpringConfigUrlPrefix = "IGNITE_SPRING_CONFIG_URL_PREFIX"; /** */ private const string DefaultCfg = "config/default-config.xml"; /** */ private static readonly object SyncRoot = new object(); /** GC warning flag. */ private static int _gcWarn; /** */ private static readonly IDictionary<NodeKey, Ignite> Nodes = new Dictionary<NodeKey, Ignite>(); /** Current DLL name. */ private static readonly string IgniteDllName = Path.GetFileName(Assembly.GetExecutingAssembly().Location); /** Startup info. */ [ThreadStatic] private static Startup _startup; /** Client mode flag. */ [ThreadStatic] private static bool _clientMode; /// <summary> /// Static initializer. /// </summary> [SuppressMessage("Microsoft.Performance", "CA1810:InitializeReferenceTypeStaticFieldsInline")] static Ignition() { AppDomain.CurrentDomain.AssemblyResolve += CurrentDomain_AssemblyResolve; } /// <summary> /// Gets or sets a value indicating whether Ignite should be started in client mode. /// Client nodes cannot hold data in caches. /// </summary> public static bool ClientMode { get { return _clientMode; } set { _clientMode = value; } } /// <summary> /// Starts Ignite with default configuration. By default this method will /// use Ignite configuration defined in <c>{IGNITE_HOME}/config/default-config.xml</c> /// configuration file. If such file is not found, then all system defaults will be used. /// </summary> /// <returns>Started Ignite.</returns> public static IIgnite Start() { return Start(new IgniteConfiguration()); } /// <summary> /// Starts all grids specified within given Spring XML configuration file. If Ignite with given name /// is already started, then exception is thrown. 
In this case all instances that may /// have been started so far will be stopped too. /// </summary> /// <param name="springCfgPath">Spring XML configuration file path or URL. Note, that the path can be /// absolute or relative to IGNITE_HOME.</param> /// <returns>Started Ignite. If Spring configuration contains multiple Ignite instances, then the 1st /// found instance is returned.</returns> public static IIgnite Start(string springCfgPath) { return Start(new IgniteConfiguration {SpringConfigUrl = springCfgPath}); } /// <summary> /// Starts Ignite with given configuration. /// </summary> /// <returns>Started Ignite.</returns> public unsafe static IIgnite Start(IgniteConfiguration cfg) { IgniteArgumentCheck.NotNull(cfg, "cfg"); // Copy configuration to avoid changes to user-provided instance. IgniteConfigurationEx cfgEx = cfg as IgniteConfigurationEx; cfg = cfgEx == null ? new IgniteConfiguration(cfg) : new IgniteConfigurationEx(cfgEx); // Set default Spring config if needed. if (cfg.SpringConfigUrl == null) cfg.SpringConfigUrl = DefaultCfg; lock (SyncRoot) { // 1. Check GC settings. CheckServerGc(cfg); // 2. Create context. IgniteUtils.LoadDlls(cfg.JvmDllPath); var cbs = new UnmanagedCallbacks(); IgniteManager.CreateJvmContext(cfg, cbs); var gridName = cfgEx != null ? cfgEx.GridName : null; var cfgPath = Environment.GetEnvironmentVariable(EnvIgniteSpringConfigUrlPrefix) + (cfg.SpringConfigUrl ?? DefaultCfg); // 3. Create startup object which will guide us through the rest of the process. _startup = new Startup(cfg, cbs); IUnmanagedTarget interopProc = null; try { // 4. Initiate Ignite start. UU.IgnitionStart(cbs.Context, cfgPath, gridName, ClientMode); // 5. At this point start routine is finished. We expect STARTUP object to have all necessary data. var node = _startup.Ignite; interopProc = node.InteropProcessor; // 6. On-start callback (notify lifecycle components). node.OnStart(); Nodes[new NodeKey(_startup.Name)] = node; return node; } catch (Exception) { // 1. Perform keys cleanup. string name = _startup.Name; if (name != null) { NodeKey key = new NodeKey(name); if (Nodes.ContainsKey(key)) Nodes.Remove(key); } // 2. Stop Ignite node if it was started. if (interopProc != null) UU.IgnitionStop(interopProc.Context, gridName, true); // 3. Throw error further (use startup error if exists because it is more precise). if (_startup.Error != null) throw _startup.Error; throw; } finally { _startup = null; if (interopProc != null) UU.ProcessorReleaseStart(interopProc); } } } /// <summary> /// Check whether GC is set to server mode. /// </summary> /// <param name="cfg">Configuration.</param> private static void CheckServerGc(IgniteConfiguration cfg) { if (!cfg.SuppressWarnings && !GCSettings.IsServerGC && Interlocked.CompareExchange(ref _gcWarn, 1, 0) == 0) Console.WriteLine("GC server mode is not enabled, this could lead to less " + "than optimal performance on multi-core machines (to enable see " + "http://msdn.microsoft.com/en-us/library/ms229357(v=vs.110).aspx)."); } /// <summary> /// Prepare callback invoked from Java. 
/// </summary> /// <param name="inStream">Intput stream with data.</param> /// <param name="outStream">Output stream.</param> /// <param name="handleRegistry">Handle registry.</param> internal static void OnPrepare(PlatformMemoryStream inStream, PlatformMemoryStream outStream, HandleRegistry handleRegistry) { try { BinaryReader reader = BinaryUtils.Marshaller.StartUnmarshal(inStream); PrepareConfiguration(reader); PrepareLifecycleBeans(reader, outStream, handleRegistry); } catch (Exception e) { _startup.Error = e; throw; } } /// <summary> /// Preapare configuration. /// </summary> /// <param name="reader">Reader.</param> private static void PrepareConfiguration(BinaryReader reader) { // 1. Load assemblies. IgniteConfiguration cfg = _startup.Configuration; LoadAssemblies(cfg.Assemblies); ICollection<string> cfgAssembllies; BinaryConfiguration binaryCfg; BinaryUtils.ReadConfiguration(reader, out cfgAssembllies, out binaryCfg); LoadAssemblies(cfgAssembllies); // 2. Create marshaller only after assemblies are loaded. if (cfg.BinaryConfiguration == null) cfg.BinaryConfiguration = binaryCfg; _startup.Marshaller = new Marshaller(cfg.BinaryConfiguration); } /// <summary> /// Prepare lifecycle beans. /// </summary> /// <param name="reader">Reader.</param> /// <param name="outStream">Output stream.</param> /// <param name="handleRegistry">Handle registry.</param> private static void PrepareLifecycleBeans(BinaryReader reader, PlatformMemoryStream outStream, HandleRegistry handleRegistry) { IList<LifecycleBeanHolder> beans = new List<LifecycleBeanHolder>(); // 1. Read beans defined in Java. int cnt = reader.ReadInt(); for (int i = 0; i < cnt; i++) beans.Add(new LifecycleBeanHolder(CreateLifecycleBean(reader))); // 2. Append beans definied in local configuration. ICollection<ILifecycleBean> nativeBeans = _startup.Configuration.LifecycleBeans; if (nativeBeans != null) { foreach (ILifecycleBean nativeBean in nativeBeans) beans.Add(new LifecycleBeanHolder(nativeBean)); } // 3. Write bean pointers to Java stream. outStream.WriteInt(beans.Count); foreach (LifecycleBeanHolder bean in beans) outStream.WriteLong(handleRegistry.AllocateCritical(bean)); outStream.SynchronizeOutput(); // 4. Set beans to STARTUP object. _startup.LifecycleBeans = beans; } /// <summary> /// Create lifecycle bean. /// </summary> /// <param name="reader">Reader.</param> /// <returns>Lifecycle bean.</returns> private static ILifecycleBean CreateLifecycleBean(BinaryReader reader) { // 1. Instantiate. var bean = IgniteUtils.CreateInstance<ILifecycleBean>(reader.ReadString()); // 2. Set properties. var props = reader.ReadDictionaryAsGeneric<string, object>(); IgniteUtils.SetProperties(bean, props); return bean; } /// <summary> /// Kernal start callback. /// </summary> /// <param name="interopProc">Interop processor.</param> /// <param name="stream">Stream.</param> internal static void OnStart(IUnmanagedTarget interopProc, IBinaryStream stream) { try { // 1. Read data and leave critical state ASAP. BinaryReader reader = BinaryUtils.Marshaller.StartUnmarshal(stream); // ReSharper disable once PossibleInvalidOperationException var name = reader.ReadString(); // 2. Set ID and name so that Start() method can use them later. _startup.Name = name; if (Nodes.ContainsKey(new NodeKey(name))) throw new IgniteException("Ignite with the same name already started: " + name); _startup.Ignite = new Ignite(_startup.Configuration, _startup.Name, interopProc, _startup.Marshaller, _startup.LifecycleBeans, _startup.Callbacks); } catch (Exception e) { // 5. 
Preserve exception to throw it later in the "Start" method and throw it further // to abort startup in Java. _startup.Error = e; throw; } } /// <summary> /// Load assemblies. /// </summary> /// <param name="assemblies">Assemblies.</param> private static void LoadAssemblies(IEnumerable<string> assemblies) { if (assemblies != null) { foreach (string s in assemblies) { // 1. Try loading as directory. if (Directory.Exists(s)) { string[] files = Directory.GetFiles(s, "*.dll"); #pragma warning disable 0168 foreach (string dllPath in files) { if (!SelfAssembly(dllPath)) { try { Assembly.LoadFile(dllPath); } catch (BadImageFormatException) { // No-op. } } } #pragma warning restore 0168 continue; } // 2. Try loading using full-name. try { Assembly assembly = Assembly.Load(s); if (assembly != null) continue; } catch (Exception e) { if (!(e is FileNotFoundException || e is FileLoadException)) throw new IgniteException("Failed to load assembly: " + s, e); } // 3. Try loading using file path. try { Assembly assembly = Assembly.LoadFrom(s); if (assembly != null) continue; } catch (Exception e) { if (!(e is FileNotFoundException || e is FileLoadException)) throw new IgniteException("Failed to load assembly: " + s, e); } // 4. Not found, exception. throw new IgniteException("Failed to load assembly: " + s); } } } /// <summary> /// Whether assembly points to Ignite binary. /// </summary> /// <param name="assembly">Assembly to check..</param> /// <returns><c>True</c> if this is one of GG assemblies.</returns> private static bool SelfAssembly(string assembly) { return assembly.EndsWith(IgniteDllName, StringComparison.OrdinalIgnoreCase); } /// <summary> /// Gets a named Ignite instance. If Ignite name is {@code null} or empty string, /// then default no-name Ignite will be returned. Note that caller of this method /// should not assume that it will return the same instance every time. /// <p/> /// Note that single process can run multiple Ignite instances and every Ignite instance (and its /// node) can belong to a different grid. Ignite name defines what grid a particular Ignite /// instance (and correspondingly its node) belongs to. /// </summary> /// <param name="name">Ignite name to which requested Ignite instance belongs. If <c>null</c>, /// then Ignite instance belonging to a default no-name Ignite will be returned. /// </param> /// <returns>An instance of named grid.</returns> public static IIgnite GetIgnite(string name) { lock (SyncRoot) { Ignite result; if (!Nodes.TryGetValue(new NodeKey(name), out result)) throw new IgniteException("Ignite instance was not properly started or was already stopped: " + name); return result; } } /// <summary> /// Gets an instance of default no-name grid. Note that /// caller of this method should not assume that it will return the same /// instance every time. /// </summary> /// <returns>An instance of default no-name grid.</returns> public static IIgnite GetIgnite() { return GetIgnite(null); } /// <summary> /// Stops named grid. If <c>cancel</c> flag is set to <c>true</c> then /// all jobs currently executing on local node will be interrupted. If /// grid name is <c>null</c>, then default no-name Ignite will be stopped. /// </summary> /// <param name="name">Grid name. 
If <c>null</c>, then default no-name Ignite will be stopped.</param> /// <param name="cancel">If <c>true</c> then all jobs currently executing will be cancelled /// by calling <c>ComputeJob.cancel</c>method.</param> /// <returns><c>true</c> if named Ignite instance was indeed found and stopped, <c>false</c> /// othwerwise (the instance with given <c>name</c> was not found).</returns> public static bool Stop(string name, bool cancel) { lock (SyncRoot) { NodeKey key = new NodeKey(name); Ignite node; if (!Nodes.TryGetValue(key, out node)) return false; node.Stop(cancel); Nodes.Remove(key); GC.Collect(); return true; } } /// <summary> /// Stops <b>all</b> started grids. If <c>cancel</c> flag is set to <c>true</c> then /// all jobs currently executing on local node will be interrupted. /// </summary> /// <param name="cancel">If <c>true</c> then all jobs currently executing will be cancelled /// by calling <c>ComputeJob.Cancel()</c> method.</param> public static void StopAll(bool cancel) { lock (SyncRoot) { while (Nodes.Count > 0) { var entry = Nodes.First(); entry.Value.Stop(cancel); Nodes.Remove(entry.Key); } } GC.Collect(); } /// <summary> /// Handles the AssemblyResolve event of the CurrentDomain control. /// </summary> /// <param name="sender">The source of the event.</param> /// <param name="args">The <see cref="ResolveEventArgs"/> instance containing the event data.</param> /// <returns>Manually resolved assembly, or null.</returns> private static Assembly CurrentDomain_AssemblyResolve(object sender, ResolveEventArgs args) { return LoadedAssembliesResolver.Instance.GetAssembly(args.Name); } /// <summary> /// Grid key. /// </summary> private class NodeKey { /** */ private readonly string _name; /// <summary> /// Initializes a new instance of the <see cref="NodeKey"/> class. /// </summary> /// <param name="name">The name.</param> internal NodeKey(string name) { _name = name; } /** <inheritdoc /> */ public override bool Equals(object obj) { var other = obj as NodeKey; return other != null && Equals(_name, other._name); } /** <inheritdoc /> */ public override int GetHashCode() { return _name == null ? 0 : _name.GetHashCode(); } } /// <summary> /// Value object to pass data between .Net methods during startup bypassing Java. /// </summary> private class Startup { /// <summary> /// Constructor. /// </summary> /// <param name="cfg">Configuration.</param> /// <param name="cbs"></param> internal Startup(IgniteConfiguration cfg, UnmanagedCallbacks cbs) { Configuration = cfg; Callbacks = cbs; } /// <summary> /// Configuration. /// </summary> internal IgniteConfiguration Configuration { get; private set; } /// <summary> /// Gets unmanaged callbacks. /// </summary> internal UnmanagedCallbacks Callbacks { get; private set; } /// <summary> /// Lifecycle beans. /// </summary> internal IList<LifecycleBeanHolder> LifecycleBeans { get; set; } /// <summary> /// Node name. /// </summary> internal string Name { get; set; } /// <summary> /// Marshaller. /// </summary> internal Marshaller Marshaller { get; set; } /// <summary> /// Start error. /// </summary> internal Exception Error { get; set; } /// <summary> /// Gets or sets the ignite. /// </summary> internal Ignite Ignite { get; set; } } } }
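// A minimal usage sketch for the Ignition facade above: start a node, retrieve it
// again while it is running, then stop it. The namespace, class name and the
// Spring configuration path below are placeholders for illustration and are not
// defined by this file.
namespace Apache.Ignite.Core.Sketches
{
    internal static class IgnitionUsageSketch
    {
        public static void Run()
        {
            var cfg = new IgniteConfiguration
            {
                SpringConfigUrl = "config/example-config.xml" // placeholder path
            };

            IIgnite ignite = Ignition.Start(cfg);

            // The default no-name instance can be looked up again while it is running.
            IIgnite same = Ignition.GetIgnite();

            // Stop the default no-name instance, cancelling any jobs still executing.
            Ignition.Stop(null, true);
        }
    }
}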
using System; using System.Text; using System.IO; namespace MqttLib.Core.Messages { /// <summary> /// The different types of messages defined in the MQTT protocol /// </summary> public enum MessageType : byte { CONNECT = 1, CONNACK = 2, PUBLISH = 3, PUBACK = 4, PUBREC = 5, PUBREL = 6, PUBCOMP = 7, SUBSCRIBE = 8, SUBACK = 9, UNSUBSCRIBE = 10, UNSUBACK = 11, PINGREQ = 12, PINGRESP = 13, DISCONNECT = 14, EXTENDEDACK = 15 } internal abstract class MqttMessage : MqttLib.Core.Messages.IPersitentMessage { // UTF8 encoding for all strings protected UTF8Encoding enc = new UTF8Encoding(); protected MessageType msgType; private bool isDuplicate = false; protected bool isRetained = false; protected ulong _messageID = 0; private long _timestamp; public long Timestamp { get { return _timestamp; } set { _timestamp = value; } } public bool Retained { get { return isRetained; } } public bool Duplicate { get { return isDuplicate; } set { isDuplicate = value; } } public ulong MessageID { get { return _messageID; } } protected QoS msgQos = QoS.BestEfforts; protected int variableHeaderLength = 0; public MqttMessage(MessageType msgType, int variableHeaderLength ) { this.variableHeaderLength = variableHeaderLength; this.msgType = msgType; this._timestamp = DateTime.Now.Ticks; } public MqttMessage(MessageType msgType) { this.msgType = msgType; this._timestamp = DateTime.Now.Ticks; } /// <summary> /// Creates an MqttMessage from a data stream /// </summary> /// <param name="str">Input stream</param> /// <param name="header">The first byte of the fixed header of the message</param> public MqttMessage(Stream str, byte header) { this._timestamp = DateTime.Now.Ticks; ConstructHeader(header); variableHeaderLength = DecodeVariableHeaderLength(str); ConstructFromStream(str); } private void ConstructHeader(byte header) { msgType = (MessageType)((header & 0xf0) >> 4); isDuplicate = (header & 0x08) != 0; msgQos = (QoS)((header & 0x06) >> 1); isRetained = (header & 0x01) != 0; } /// <summary> /// Decodes the length of the variable header and payload from the given stream /// </summary> /// <param name="str">Input Stream</param> /// <returns>Length of the variable header and the payload</returns> private int DecodeVariableHeaderLength(Stream str) { int multiplier = 1 ; int value = 0 ; int digit = 0; do { digit = str.ReadByte(); if (digit == -1) { return 0; } value += (digit & 127) * multiplier; multiplier *= 128; } while ((digit & 128) != 0); return value; } /// <summary> /// Encodes the length of the variable header to the format specified in the MQTT protocol /// and writes it to the given stream /// </summary> /// <param name="str">Output Stream</param> /// <param name="length">Length of variable header and payload</param> private void EncodeVariableHeaderLength(Stream str, int length) { byte digit = 0; do { digit = (byte)(length % 128); length /= 128; if (length > 0) { digit |= 0x80; } str.WriteByte(digit); } while ( length > 0); } public void Serialise(Stream str) { MemoryStream ms = new MemoryStream(); // Write the fixed header to the stream byte header = (byte)((byte)msgType << 4); if (isDuplicate) { header |= 8; } header |= (byte)((byte)msgQos << 1); if (isRetained) { header |= 1; } ms.WriteByte(header); // Add the second byte of the fixed header (The variable header length) EncodeVariableHeaderLength(ms, variableHeaderLength); // Write the payload to the stream SendPayload(ms); ms.WriteTo(str); } /// <summary> /// Concrete Message classes should write their variable header and payload to the given stream /// 
</summary> /// <param name="str">Output Stream</param> protected virtual void SendPayload(Stream str) { throw new Exception("Protocol does not support sending this message type"); } /// <summary> /// Concrete Message classes should extract their variable header and payload from the given stream /// </summary> /// <param name="str">Input Stream</param> protected virtual void ConstructFromStream(Stream str) { throw new Exception("Protocol does not support receiving this type of message"); } protected static void WriteToStream(Stream str, ushort val) { str.WriteByte((byte)(val >> 8)); str.WriteByte((byte)(val & 0xFF)); } protected static void WriteToStream(Stream str, ulong val) { WriteToStream(str, (ushort)((val >> 48) & 0xFFFF)); WriteToStream(str, (ushort)((val >> 32) & 0xFFFF)); WriteToStream(str, (ushort)((val >> 16) & 0xFFFF)); WriteToStream(str, (ushort)(val & 0xFFFF)); } protected static void WriteToStream(Stream str, string val) { UTF8Encoding enc = new UTF8Encoding(); byte[] bs = enc.GetBytes(val); WriteToStream(str, (ushort)bs.Length); str.Write(bs, 0, bs.Length); } protected static void WriteToStream(Stream str, byte[] val) { WriteToStream(str, (ushort)val.Length); str.Write(val, 0, val.Length); } protected static ushort ReadUshortFromStream(Stream str) { // Read two bytes and interpret as ushort in Network Order byte[] data = new byte[2]; ReadCompleteBuffer( str, data ); return (ushort)((data[0] << 8) + data[1]); } protected static ulong ReadUlongFromStream(Stream str) { // Read eight bytes and interpret as ulong in Network Order byte[] data = new byte[8]; ReadCompleteBuffer(str, data); return ((ulong)data[0] << 0x38) | ((ulong)data[1] << 0x30) | ((ulong)data[2] << 0x28) | ((ulong)data[3] << 0x20) | ((ulong)data[4] << 0x18) | ((ulong)data[5] << 0x10) | ((ulong)data[6] << 0x8) | (ulong)data[7]; } protected static string ReadStringFromStream(Stream str) { ushort len = ReadUshortFromStream(str); byte[] data = new byte[len]; ReadCompleteBuffer(str, data); UTF8Encoding enc = new UTF8Encoding(); return enc.GetString(data, 0, data.Length); } protected static byte[] ReadCompleteBuffer(Stream str, byte[] buffer) { int read = 0; while (read < buffer.Length) { int res = str.Read(buffer, read, buffer.Length - read); if (res <= 0) { throw new Exception("End of stream reached whilst filling buffer"); } read += res; } return buffer; } protected static int GetUTF8StringLength(string s) { UTF8Encoding enc = new UTF8Encoding(); return enc.GetByteCount(s); } public MessageType MsgType { get { return msgType; } } public QoS QualityOfService { get { return msgQos; } } } }
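// EncodeVariableHeaderLength/DecodeVariableHeaderLength above implement the MQTT
// "remaining length" scheme: 7 bits of the length per byte, with the high bit set
// on every byte except the last. The standalone sketch below shows the same round
// trip against a MemoryStream; the class name is illustrative and not part of MqttLib.
internal static class RemainingLengthSketch
{
    public static void RoundTrip()
    {
        int length = 321; // example: encodes as 0xC1 0x02
        var ms = new System.IO.MemoryStream();

        // Encode (same loop as EncodeVariableHeaderLength).
        int remaining = length;
        do
        {
            byte digit = (byte)(remaining % 128);
            remaining /= 128;
            if (remaining > 0) digit |= 0x80;
            ms.WriteByte(digit);
        } while (remaining > 0);

        // Decode (same loop as DecodeVariableHeaderLength).
        ms.Position = 0;
        int multiplier = 1, value = 0, b;
        do
        {
            b = ms.ReadByte();
            value += (b & 127) * multiplier;
            multiplier *= 128;
        } while ((b & 128) != 0);

        System.Diagnostics.Debug.Assert(value == length);
    }
}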
// Graph Engine // Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE.md file in the project root for full license information. // using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Net; using Trinity; using Trinity.Network; using Trinity.Network.Messaging; using Trinity.Core.Lib; using System.IO; using Trinity.Configuration; namespace Trinity.Storage { internal unsafe partial class RemoteStorage : Storage, IDisposable { public override TrinityErrorCode LoadCell(long cell_id, out byte[] cellBuff) { TrinityMessage msg = new TrinityMessage(TrinityMessageType.PRESERVED_SYNC_WITH_RSP, (ushort)RequestType.LoadCell, sizeof(long)); *(long*)(msg.Buffer + TrinityMessage.Offset) = cell_id; TrinityResponse response; Network.Client.SynClient sc = GetClient(); bool sendSuccess = (TrinityErrorCode.E_SUCCESS == sc.SendMessage(msg.Buffer, msg.Size, out response)); PutBackClient(sc); msg.Dispose(); cellBuff = new byte[response.Size - response.Offset]; var eResult = response.ErrorCode; Memory.Copy(response.Buffer, response.Offset, cellBuff, 0, cellBuff.Length); response.Dispose(); return sendSuccess ? eResult : TrinityErrorCode.E_NETWORK_SEND_FAILURE; } public override TrinityErrorCode LoadCell(long cellId, out byte[] cellBuff, out ushort cellType) { TrinityMessage msg = new TrinityMessage(TrinityMessageType.PRESERVED_SYNC_WITH_RSP, (ushort)RequestType.LoadCellWithType, sizeof(long)); *(long*)(msg.Buffer + TrinityMessage.Offset) = cellId; TrinityResponse response; Network.Client.SynClient sc = GetClient(); bool sendSuccess = (TrinityErrorCode.E_SUCCESS == sc.SendMessage(msg.Buffer, msg.Size, out response)); PutBackClient(sc); msg.Dispose(); int payload_len = response.Size - response.Offset; byte* payload_ptr = response.Buffer + response.Offset; int cell_len = payload_len - sizeof(ushort); cellBuff = new byte[cell_len]; var eResult = response.ErrorCode; Memory.Copy(payload_ptr, 0, cellBuff, 0, cell_len); cellType = *(ushort*)(payload_ptr + cell_len); response.Dispose(); return sendSuccess ? eResult : TrinityErrorCode.E_NETWORK_SEND_FAILURE; } public override TrinityErrorCode SaveCell(long cellId, byte[] cellBytes) { fixed (byte* p = cellBytes) { return SaveCell(cellId, p, cellBytes.Length, StorageConfig.c_UndefinedCellType); } } public override TrinityErrorCode SaveCell(long cellId, byte[] cellBytes, int startIndex, int cellSize) { fixed (byte* p = cellBytes) { return SaveCell(cellId, p + startIndex, cellSize, StorageConfig.c_UndefinedCellType); } } public override TrinityErrorCode SaveCell(long cellId, byte[] cellBytes, int startIndex, int cellSize, ushort cellType) { fixed (byte* p = cellBytes) { return SaveCell(cellId, p + startIndex, cellSize, cellType); } } public override TrinityErrorCode SaveCell(long cell_id, byte* cellBytes, int cellSize, ushort cellType) { TrinityMessage msg = new TrinityMessage(TrinityMessageType.PRESERVED_SYNC_WITH_RSP, (ushort)RequestType.SaveCell, cellSize + 14 /*cell_type(2)+ cell_id(8) +cell_size(4)*/); byte* p = msg.Buffer + TrinityMessage.Offset; *(ushort*)p = cellType; p += 2; *(long*)p = cell_id; p += 8; *(int*)p = cellSize; p += 4; Memory.Copy(cellBytes, p, cellSize); TrinityResponse response; Network.Client.SynClient sc = GetClient(); bool sendSuccess = (TrinityErrorCode.E_SUCCESS == sc.SendMessage(msg.Buffer, msg.Size, out response)); PutBackClient(sc); msg.Dispose(); var eResult = response.ErrorCode; response.Dispose(); return sendSuccess ? 
eResult : TrinityErrorCode.E_NETWORK_SEND_FAILURE; } internal TrinityErrorCode SaveCell(long cellId, byte[] cellBytes, ushort cellType) { fixed (byte* p = cellBytes) { return SaveCell(cellId, p, cellBytes.Length, cellType); } } public override TrinityErrorCode SaveCell(long cellId, byte* cellPtr, int length) { return SaveCell(cellId, cellPtr, length, StorageConfig.c_UndefinedCellType); } public override TrinityErrorCode RemoveCell(long cell_id) { TrinityMessage msg = new TrinityMessage(TrinityMessageType.PRESERVED_SYNC_WITH_RSP, (ushort)RequestType.RemoveCell, sizeof(long)); *(long*)(msg.Buffer + TrinityMessage.Offset) = cell_id; TrinityResponse response; Network.Client.SynClient sc = GetClient(); bool sendSuccess = (TrinityErrorCode.E_SUCCESS == sc.SendMessage(msg.Buffer, msg.Size, out response)); PutBackClient(sc); msg.Dispose(); var eResult = response.ErrorCode; response.Dispose(); return sendSuccess ? eResult : TrinityErrorCode.E_NETWORK_SEND_FAILURE; } public override bool Contains(long cellId) { TrinityMessage request; TrinityResponse response; TrinityErrorCode eResult; bool contains = false; request = new TrinityMessage(TrinityMessageType.PRESERVED_SYNC_WITH_RSP, (ushort)RequestType.Contains, sizeof(long)); *(long*)(request.Buffer + TrinityMessage.Offset) = cellId; Network.Client.SynClient sc = GetClient(); eResult = sc.SendMessage(request.Buffer, request.Size, out response); PutBackClient(sc); if (eResult == TrinityErrorCode.E_SUCCESS) { //returns E_CELL_FOUND or E_CELL_NOTFOUND contains = (response.ErrorCode == TrinityErrorCode.E_CELL_FOUND); } request.Dispose(); response.Dispose(); _error_check(eResult); return contains; } public override TrinityErrorCode GetCellType(long cellId, out ushort cellType) { TrinityMessage msg = new TrinityMessage(TrinityMessageType.PRESERVED_SYNC_WITH_RSP, (ushort)RequestType.GetCellType, sizeof(long)); *(long*)(msg.Buffer + TrinityMessage.Offset) = cellId; TrinityResponse response; Network.Client.SynClient sc = GetClient(); bool sendSuccess = (TrinityErrorCode.E_SUCCESS == sc.SendMessage(msg.Buffer, msg.Size, out response)); PutBackClient(sc); msg.Dispose(); cellType = *(ushort*)(response.Buffer + response.Offset); var eResult = response.ErrorCode; response.Dispose(); return sendSuccess ? 
eResult : TrinityErrorCode.E_NETWORK_SEND_FAILURE; } public override TrinityErrorCode AddCell(long cellId, byte[] cellBytes) { fixed (byte* p = cellBytes) { return AddCell(cellId, p, cellBytes.Length, StorageConfig.c_UndefinedCellType); } } public override TrinityErrorCode AddCell(long cellId, byte[] cellBytes, int startIndex, int length) { fixed (byte* p = &cellBytes[startIndex]) { return AddCell(cellId, p, length, StorageConfig.c_UndefinedCellType); } } public override TrinityErrorCode AddCell(long cellId, byte[] cellBytes, int startIndex, int length, ushort cellType) { fixed (byte* p = &cellBytes[startIndex]) { return AddCell(cellId, p, length, cellType); } } public override TrinityErrorCode AddCell(long cellId, byte* cellBytes, int cellSize) { return AddCell(cellId, cellBytes, cellSize, StorageConfig.c_UndefinedCellType); } public override TrinityErrorCode AddCell(long cellId, byte* cellBytes, int cellSize, ushort cellType) { TrinityMessage msg = new TrinityMessage(TrinityMessageType.PRESERVED_SYNC_WITH_RSP, (ushort)RequestType.AddCell, cellSize + 14/*cell_type(2)+ cell_id(8) +cell_size(4)*/); byte* p = msg.Buffer + TrinityMessage.Offset; *(ushort*)p = cellType; p += 2; *(long*)p = cellId; p += 8; *(int*)p = cellSize; p += 4; Memory.Copy(cellBytes, p, cellSize); TrinityResponse response; Network.Client.SynClient sc = GetClient(); bool sendSuccess = (TrinityErrorCode.E_SUCCESS == sc.SendMessage(msg.Buffer, msg.Size, out response)); PutBackClient(sc); msg.Dispose(); var eResult = response.ErrorCode; response.Dispose(); return sendSuccess ? eResult : TrinityErrorCode.E_NETWORK_SEND_FAILURE; } public override TrinityErrorCode UpdateCell(long cellId, byte* cellPtr, int length) { TrinityMessage msg = new TrinityMessage(TrinityMessageType.PRESERVED_SYNC_WITH_RSP, (ushort)RequestType.UpdateCell, length + 12/*cell_id(8) +cell_size(4)*/); byte* p = msg.Buffer + TrinityMessage.Offset; *(long*)p = cellId; p += 8; *(int*)p = length; p += 4; Memory.Copy(cellPtr, p, length); TrinityResponse response; Network.Client.SynClient sc = GetClient(); bool sendSuccess = (TrinityErrorCode.E_SUCCESS == sc.SendMessage(msg.Buffer, msg.Size, out response)); PutBackClient(sc); msg.Dispose(); var eResult = response.ErrorCode; response.Dispose(); return sendSuccess ? eResult : TrinityErrorCode.E_NETWORK_SEND_FAILURE; } public override TrinityErrorCode UpdateCell(long cellId, byte[] cellBytes) { fixed (byte* p = cellBytes) { return UpdateCell(cellId, p, cellBytes.Length); } } public override TrinityErrorCode UpdateCell(long cellId, byte[] cellBytes, int startIndex, int length) { fixed (byte* p = cellBytes) { return UpdateCell(cellId, p + startIndex, length); } } } }
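// SaveCell/AddCell above build their request payload as a fixed 14-byte prefix
// followed by the raw cell bytes: cell_type (2 bytes) + cell_id (8 bytes) +
// cell_size (4 bytes) + cellSize bytes of data, written immediately after
// TrinityMessage.Offset. The sketch below lays out the same prefix with plain byte
// arrays so the offsets are easy to check; the class and method names are
// illustrative only, and BitConverter uses the machine's native byte order, which
// matches the raw pointer writes above when run on the same platform.
internal static class SaveCellLayoutSketch
{
    public static byte[] BuildPayload(ushort cellType, long cellId, byte[] cellBytes)
    {
        var payload = new byte[2 + 8 + 4 + cellBytes.Length];               // 14-byte prefix + data
        System.BitConverter.GetBytes(cellType).CopyTo(payload, 0);          // cell_type(2)
        System.BitConverter.GetBytes(cellId).CopyTo(payload, 2);            // cell_id(8)
        System.BitConverter.GetBytes(cellBytes.Length).CopyTo(payload, 10); // cell_size(4)
        System.Buffer.BlockCopy(cellBytes, 0, payload, 14, cellBytes.Length);
        return payload;
    }
}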
// ------------------------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information. // ------------------------------------------------------------------------------ namespace Microsoft.Graph { using Newtonsoft.Json; using Newtonsoft.Json.Linq; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Threading.Tasks; /// <summary> /// A <see cref="HttpContent"/> implementation to handle json batch requests. /// </summary> public class BatchRequestContent: HttpContent { /// <summary> /// A BatchRequestSteps property. /// </summary> public IReadOnlyDictionary<string, BatchRequestStep> BatchRequestSteps { get; private set; } /// <summary> /// Gets a serializer for serializing and deserializing JSON objects. /// </summary> public ISerializer Serializer { get; private set; } /// <summary> /// Constructs a new <see cref="BatchRequestContent"/>. /// </summary> public BatchRequestContent() :this(new BatchRequestStep[] { },null) { } /// <summary> /// Constructs a new <see cref="BatchRequestContent"/>. /// </summary> /// <param name="batchRequestSteps">A list of <see cref="BatchRequestStep"/> to add to the batch request content.</param> /// <param name="serializer">A serializer for serializing and deserializing JSON objects.</param> public BatchRequestContent(BatchRequestStep [] batchRequestSteps, ISerializer serializer = null) : this(batchRequestSteps) { this.Serializer = serializer ?? new Serializer(); } /// <summary> /// Constructs a new <see cref="BatchRequestContent"/>. /// </summary> /// <param name="batchRequestSteps">A list of <see cref="BatchRequestStep"/> to add to the batch request content.</param> public BatchRequestContent(params BatchRequestStep[] batchRequestSteps) { if (batchRequestSteps == null) throw new ClientException(new Error { Code = ErrorConstants.Codes.InvalidArgument, Message = string.Format(ErrorConstants.Messages.NullParameter, nameof(batchRequestSteps)) }); if (batchRequestSteps.Count() > CoreConstants.BatchRequest.MaxNumberOfRequests) throw new ClientException(new Error { Code = ErrorConstants.Codes.MaximumValueExceeded, Message = string.Format(ErrorConstants.Messages.MaximumValueExceeded, "Number of batch request steps", CoreConstants.BatchRequest.MaxNumberOfRequests) }); this.Headers.ContentType = new MediaTypeHeaderValue(CoreConstants.MimeTypeNames.Application.Json); BatchRequestSteps = new Dictionary<string, BatchRequestStep>(); foreach (BatchRequestStep requestStep in batchRequestSteps) { if(requestStep.DependsOn != null && !ContainsCorrespondingRequestId(requestStep.DependsOn)) { throw new ClientException(new Error { Code = ErrorConstants.Codes.InvalidArgument, Message = ErrorConstants.Messages.InvalidDependsOnRequestId }); } AddBatchRequestStep(requestStep); } this.Serializer = new Serializer(); } /// <summary> /// Adds a <see cref="BatchRequestStep"/> to batch request content if doesn't exists. /// </summary> /// <param name="batchRequestStep">A <see cref="BatchRequestStep"/> to add.</param> /// <returns>True or false based on addition or not addition of the provided <see cref="BatchRequestStep"/>. 
</returns> public bool AddBatchRequestStep(BatchRequestStep batchRequestStep) { if (batchRequestStep == null || BatchRequestSteps.ContainsKey(batchRequestStep.RequestId) || BatchRequestSteps.Count >= CoreConstants.BatchRequest.MaxNumberOfRequests //we should not add any more steps ) { return false; } (BatchRequestSteps as IDictionary<string, BatchRequestStep>).Add(batchRequestStep.RequestId, batchRequestStep); return true; } /// <summary> /// Adds a <see cref="HttpRequestMessage"/> to batch request content. /// </summary> /// <param name="httpRequestMessage">A <see cref="HttpRequestMessage"/> to use to build a <see cref="BatchRequestStep"/> to add.</param> /// <returns>The requestId of the newly created <see cref="BatchRequestStep"/></returns> public string AddBatchRequestStep(HttpRequestMessage httpRequestMessage) { if (BatchRequestSteps.Count >= CoreConstants.BatchRequest.MaxNumberOfRequests) throw new ClientException(new Error { Code = ErrorConstants.Codes.MaximumValueExceeded, Message = string.Format(ErrorConstants.Messages.MaximumValueExceeded, "Number of batch request steps", CoreConstants.BatchRequest.MaxNumberOfRequests) }); string requestId = Guid.NewGuid().ToString(); BatchRequestStep batchRequestStep = new BatchRequestStep(requestId, httpRequestMessage); (BatchRequestSteps as IDictionary<string, BatchRequestStep>).Add(batchRequestStep.RequestId, batchRequestStep); return requestId; } /// <summary> /// Adds a <see cref="IBaseRequest"/> to batch request content /// </summary> /// <param name="request">A <see cref="BaseRequest"/> to use to build a <see cref="BatchRequestStep"/> to add.</param> /// <returns>The requestId of the newly created <see cref="BatchRequestStep"/></returns> public string AddBatchRequestStep(IBaseRequest request) { if (BatchRequestSteps.Count >= CoreConstants.BatchRequest.MaxNumberOfRequests) throw new ClientException(new Error { Code = ErrorConstants.Codes.MaximumValueExceeded, Message = string.Format(ErrorConstants.Messages.MaximumValueExceeded, "Number of batch request steps", CoreConstants.BatchRequest.MaxNumberOfRequests) }); string requestId = Guid.NewGuid().ToString(); BatchRequestStep batchRequestStep = new BatchRequestStep(requestId, request.GetHttpRequestMessage()); (BatchRequestSteps as IDictionary<string, BatchRequestStep>).Add(batchRequestStep.RequestId, batchRequestStep); return requestId; } /// <summary> /// Removes a <see cref="BatchRequestStep"/> from batch request content for the specified id. 
/// </summary> /// <param name="requestId">A unique batch request id to remove.</param> /// <returns>True or false based on removal or not removal of a <see cref="BatchRequestStep"/>.</returns> public bool RemoveBatchRequestStepWithId(string requestId) { if (string.IsNullOrEmpty(requestId)) throw new ClientException( new Error { Code = ErrorConstants.Codes.InvalidArgument, Message = string.Format(ErrorConstants.Messages.NullParameter, nameof(requestId)) }); bool isRemoved = false; if (BatchRequestSteps.ContainsKey(requestId)) { (BatchRequestSteps as IDictionary<string, BatchRequestStep>).Remove(requestId); isRemoved = true; foreach (KeyValuePair<string, BatchRequestStep> batchRequestStep in BatchRequestSteps) { if (batchRequestStep.Value != null && batchRequestStep.Value.DependsOn != null) while (batchRequestStep.Value.DependsOn.Remove(requestId)) ; } } return isRemoved; } internal async Task<JObject> GetBatchRequestContentAsync() { JObject batchRequest = new JObject(); JArray batchRequestItems = new JArray(); foreach (KeyValuePair<string, BatchRequestStep> batchRequestStep in BatchRequestSteps) batchRequestItems.Add(await GetBatchRequestContentFromStepAsync(batchRequestStep.Value)); batchRequest.Add(CoreConstants.BatchRequest.Requests, batchRequestItems); return batchRequest; } private bool ContainsCorrespondingRequestId(IList<string> dependsOn) { return dependsOn.All(requestId => BatchRequestSteps.ContainsKey(requestId)); } private async Task<JObject> GetBatchRequestContentFromStepAsync(BatchRequestStep batchRequestStep) { JObject jRequestContent = new JObject { { CoreConstants.BatchRequest.Id, batchRequestStep.RequestId }, { CoreConstants.BatchRequest.Url, GetRelativeUrl(batchRequestStep.Request.RequestUri) }, { CoreConstants.BatchRequest.Method, batchRequestStep.Request.Method.Method } }; if (batchRequestStep.DependsOn != null && batchRequestStep.DependsOn.Count() > 0) jRequestContent.Add(CoreConstants.BatchRequest.DependsOn, new JArray(batchRequestStep.DependsOn)); if (batchRequestStep.Request.Content?.Headers != null && batchRequestStep.Request.Content.Headers.Count() > 0) jRequestContent.Add(CoreConstants.BatchRequest.Headers, GetContentHeader(batchRequestStep.Request.Content.Headers)); if(batchRequestStep.Request != null && batchRequestStep.Request.Content != null) { jRequestContent.Add(CoreConstants.BatchRequest.Body, await GetRequestContentAsync(batchRequestStep.Request)); } return jRequestContent; } private async Task<JObject> GetRequestContentAsync(HttpRequestMessage request) { try { HttpRequestMessage clonedRequest = await request.CloneAsync(); using (Stream streamContent = await clonedRequest.Content.ReadAsStreamAsync()) { return Serializer.DeserializeObject<JObject>(streamContent); } } catch (Exception ex) { throw new ClientException(new Error { Code = ErrorConstants.Codes.InvalidRequest, Message = ErrorConstants.Messages.UnableToDeserializexContent }, ex); } } private JObject GetContentHeader(HttpContentHeaders headers) { JObject jHeaders = new JObject(); foreach (KeyValuePair<string, IEnumerable<string>> header in headers) { jHeaders.Add(header.Key, GetHeaderValuesAsString(header.Value)); } return jHeaders; } private string GetHeaderValuesAsString(IEnumerable<string> headerValues) { if (headerValues == null || headerValues.Count() == 0) return string.Empty; StringBuilder builder = new StringBuilder(); foreach (string headerValue in headerValues) { builder.Append(headerValue); } return builder.ToString(); } private string GetRelativeUrl(Uri requestUri) { string version = 
"v1.0"; if (requestUri.AbsoluteUri.Contains("beta")) version = "beta"; return requestUri.AbsoluteUri.Substring(requestUri.AbsoluteUri.IndexOf(version) + version.ToCharArray().Count()); } /// <summary> /// Serialize the HTTP content to a stream as an asynchronous operation. /// </summary> /// <param name="stream">The target stream.</param> /// <param name="context">Information about the transport (channel binding token, for example). This parameter may be null.</param> /// <returns></returns> protected override async Task SerializeToStreamAsync(Stream stream, TransportContext context) { using (StreamWriter streamWritter = new StreamWriter(stream, new UTF8Encoding(), 1024, true)) using (JsonTextWriter textWritter = new JsonTextWriter(streamWritter)) { JObject batchContent = await GetBatchRequestContentAsync(); batchContent.WriteTo(textWritter); } } /// <summary> /// Determines whether the HTTP content has a valid length in bytes. /// </summary> /// <param name="length">The length in bytes of the HTTP content.</param> /// <returns></returns> protected override bool TryComputeLength(out long length) { length = -1; return false; } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Diagnostics; using System.Security.Cryptography; using System.Security.Cryptography.X509Certificates; using Microsoft.Win32.SafeHandles; namespace Internal.Cryptography.Pal { internal sealed class OpenSslX509ChainProcessor : IChainPal { // Constructed (0x20) | Sequence (0x10) => 0x30. private const uint ConstructedSequenceTagId = 0x30; public void Dispose() { } public bool? Verify(X509VerificationFlags flags, out Exception exception) { exception = null; bool isEndEntity = true; foreach (X509ChainElement element in ChainElements) { if (HasUnsuppressedError(flags, element, isEndEntity)) { return false; } isEndEntity = false; } return true; } private static bool HasUnsuppressedError(X509VerificationFlags flags, X509ChainElement element, bool isEndEntity) { foreach (X509ChainStatus status in element.ChainElementStatus) { if (status.Status == X509ChainStatusFlags.NoError) { return false; } Debug.Assert( (status.Status & (status.Status - 1)) == 0, "Only one bit is set in status.Status"); // The Windows certificate store API only checks the time error for a "peer trust" certificate, // but we don't have a concept for that in Unix. If we did, we'd need to do that logic that here. // Note also that that logic is skipped if CERT_CHAIN_POLICY_IGNORE_PEER_TRUST_FLAG is set. X509VerificationFlags? suppressionFlag; if (status.Status == X509ChainStatusFlags.RevocationStatusUnknown) { if (isEndEntity) { suppressionFlag = X509VerificationFlags.IgnoreEndRevocationUnknown; } else if (IsSelfSigned(element.Certificate)) { suppressionFlag = X509VerificationFlags.IgnoreRootRevocationUnknown; } else { suppressionFlag = X509VerificationFlags.IgnoreCertificateAuthorityRevocationUnknown; } } else { suppressionFlag = GetSuppressionFlag(status.Status); } // If an error was found, and we do NOT have the suppression flag for it enabled, // we have an unsuppressed error, so return true. (If there's no suppression for a given code, // we (by definition) don't have that flag set. if (!suppressionFlag.HasValue || (flags & suppressionFlag) == 0) { return true; } } return false; } public X509ChainElement[] ChainElements { get; private set; } public X509ChainStatus[] ChainStatus { get; private set; } public SafeX509ChainHandle SafeHandle { get { return null; } } public static IChainPal BuildChain( X509Certificate2 leaf, HashSet<X509Certificate2> candidates, HashSet<X509Certificate2> downloaded, HashSet<X509Certificate2> systemTrusted, OidCollection applicationPolicy, OidCollection certificatePolicy, X509RevocationMode revocationMode, X509RevocationFlag revocationFlag, DateTime verificationTime, ref TimeSpan remainingDownloadTime) { X509ChainElement[] elements; List<X509ChainStatus> overallStatus = new List<X509ChainStatus>(); WorkingChain workingChain = new WorkingChain(); Interop.Crypto.X509StoreVerifyCallback workingCallback = workingChain.VerifyCallback; // An X509_STORE is more comparable to Cryptography.X509Certificate2Collection than to // Cryptography.X509Store. So read this with OpenSSL eyes, not CAPI/CNG eyes. 
// // (If you need to think of it as an X509Store, it's a volatile memory store) using (SafeX509StoreHandle store = Interop.Crypto.X509StoreCreate()) using (SafeX509StoreCtxHandle storeCtx = Interop.Crypto.X509StoreCtxCreate()) { Interop.Crypto.CheckValidOpenSslHandle(store); Interop.Crypto.CheckValidOpenSslHandle(storeCtx); bool lookupCrl = revocationMode != X509RevocationMode.NoCheck; foreach (X509Certificate2 cert in candidates) { OpenSslX509CertificateReader pal = (OpenSslX509CertificateReader)cert.Pal; if (!Interop.Crypto.X509StoreAddCert(store, pal.SafeHandle)) { throw Interop.Crypto.CreateOpenSslCryptographicException(); } if (lookupCrl) { CrlCache.AddCrlForCertificate( cert, store, revocationMode, verificationTime, ref remainingDownloadTime); // If we only wanted the end-entity certificate CRL then don't look up // any more of them. lookupCrl = revocationFlag != X509RevocationFlag.EndCertificateOnly; } } if (revocationMode != X509RevocationMode.NoCheck) { if (!Interop.Crypto.X509StoreSetRevocationFlag(store, revocationFlag)) { throw Interop.Crypto.CreateOpenSslCryptographicException(); } } SafeX509Handle leafHandle = ((OpenSslX509CertificateReader)leaf.Pal).SafeHandle; if (!Interop.Crypto.X509StoreCtxInit(storeCtx, store, leafHandle)) { throw Interop.Crypto.CreateOpenSslCryptographicException(); } Interop.Crypto.X509StoreCtxSetVerifyCallback(storeCtx, workingCallback); Interop.Crypto.SetX509ChainVerifyTime(storeCtx, verificationTime); int verify = Interop.Crypto.X509VerifyCert(storeCtx); if (verify < 0) { throw Interop.Crypto.CreateOpenSslCryptographicException(); } // Because our callback tells OpenSSL that every problem is ignorable, it should tell us that the // chain is just fine (unless it returned a negative code for an exception) Debug.Assert(verify == 1, "verify == 1"); using (SafeX509StackHandle chainStack = Interop.Crypto.X509StoreCtxGetChain(storeCtx)) { int chainSize = Interop.Crypto.GetX509StackFieldCount(chainStack); elements = new X509ChainElement[chainSize]; int maybeRootDepth = chainSize - 1; // The leaf cert is 0, up to (maybe) the root at chainSize - 1 for (int i = 0; i < chainSize; i++) { List<X509ChainStatus> status = new List<X509ChainStatus>(); List<Interop.Crypto.X509VerifyStatusCode> elementErrors = i < workingChain.Errors.Count ? workingChain.Errors[i] : null; if (elementErrors != null) { AddElementStatus(elementErrors, status, overallStatus); } IntPtr elementCertPtr = Interop.Crypto.GetX509StackField(chainStack, i); if (elementCertPtr == IntPtr.Zero) { throw Interop.Crypto.CreateOpenSslCryptographicException(); } // Duplicate the certificate handle X509Certificate2 elementCert = new X509Certificate2(elementCertPtr); // If the last cert is self signed then it's the root cert, do any extra checks. if (i == maybeRootDepth && IsSelfSigned(elementCert)) { // If the root certificate was downloaded or the system // doesn't trust it, it's untrusted. 
if (downloaded.Contains(elementCert) || !systemTrusted.Contains(elementCert)) { AddElementStatus( Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CERT_UNTRUSTED, status, overallStatus); } } elements[i] = new X509ChainElement(elementCert, status.ToArray(), ""); } } } GC.KeepAlive(workingCallback); if ((certificatePolicy != null && certificatePolicy.Count > 0) || (applicationPolicy != null && applicationPolicy.Count > 0)) { List<X509Certificate2> certsToRead = new List<X509Certificate2>(); foreach (X509ChainElement element in elements) { certsToRead.Add(element.Certificate); } CertificatePolicyChain policyChain = new CertificatePolicyChain(certsToRead); bool failsPolicyChecks = false; if (certificatePolicy != null) { if (!policyChain.MatchesCertificatePolicies(certificatePolicy)) { failsPolicyChecks = true; } } if (applicationPolicy != null) { if (!policyChain.MatchesApplicationPolicies(applicationPolicy)) { failsPolicyChecks = true; } } if (failsPolicyChecks) { X509ChainElement leafElement = elements[0]; X509ChainStatus chainStatus = new X509ChainStatus { Status = X509ChainStatusFlags.NotValidForUsage, StatusInformation = SR.Chain_NoPolicyMatch, }; var elementStatus = new List<X509ChainStatus>(leafElement.ChainElementStatus.Length + 1); elementStatus.AddRange(leafElement.ChainElementStatus); AddUniqueStatus(elementStatus, ref chainStatus); AddUniqueStatus(overallStatus, ref chainStatus); elements[0] = new X509ChainElement( leafElement.Certificate, elementStatus.ToArray(), leafElement.Information); } } return new OpenSslX509ChainProcessor { ChainStatus = overallStatus.ToArray(), ChainElements = elements, }; } private static void AddElementStatus( List<Interop.Crypto.X509VerifyStatusCode> errorCodes, List<X509ChainStatus> elementStatus, List<X509ChainStatus> overallStatus) { foreach (var errorCode in errorCodes) { AddElementStatus(errorCode, elementStatus, overallStatus); } } private static void AddElementStatus( Interop.Crypto.X509VerifyStatusCode errorCode, List<X509ChainStatus> elementStatus, List<X509ChainStatus> overallStatus) { X509ChainStatusFlags statusFlag = MapVerifyErrorToChainStatus(errorCode); Debug.Assert( (statusFlag & (statusFlag - 1)) == 0, "Status flag has more than one bit set", "More than one bit is set in status '{0}' for error code '{1}'", statusFlag, errorCode); foreach (X509ChainStatus currentStatus in elementStatus) { if ((currentStatus.Status & statusFlag) != 0) { return; } } X509ChainStatus chainStatus = new X509ChainStatus { Status = statusFlag, StatusInformation = Interop.Crypto.GetX509VerifyCertErrorString(errorCode), }; elementStatus.Add(chainStatus); AddUniqueStatus(overallStatus, ref chainStatus); } private static void AddUniqueStatus(IList<X509ChainStatus> list, ref X509ChainStatus status) { X509ChainStatusFlags statusCode = status.Status; for (int i = 0; i < list.Count; i++) { if (list[i].Status == statusCode) { return; } } list.Add(status); } private static X509VerificationFlags? 
GetSuppressionFlag(X509ChainStatusFlags status) { switch (status) { case X509ChainStatusFlags.UntrustedRoot: case X509ChainStatusFlags.PartialChain: return X509VerificationFlags.AllowUnknownCertificateAuthority; case X509ChainStatusFlags.NotValidForUsage: case X509ChainStatusFlags.CtlNotValidForUsage: return X509VerificationFlags.IgnoreWrongUsage; case X509ChainStatusFlags.NotTimeValid: return X509VerificationFlags.IgnoreNotTimeValid; case X509ChainStatusFlags.CtlNotTimeValid: return X509VerificationFlags.IgnoreCtlNotTimeValid; case X509ChainStatusFlags.InvalidNameConstraints: case X509ChainStatusFlags.HasNotSupportedNameConstraint: case X509ChainStatusFlags.HasNotDefinedNameConstraint: case X509ChainStatusFlags.HasNotPermittedNameConstraint: case X509ChainStatusFlags.HasExcludedNameConstraint: return X509VerificationFlags.IgnoreInvalidName; case X509ChainStatusFlags.InvalidPolicyConstraints: case X509ChainStatusFlags.NoIssuanceChainPolicy: return X509VerificationFlags.IgnoreInvalidPolicy; case X509ChainStatusFlags.InvalidBasicConstraints: return X509VerificationFlags.IgnoreInvalidBasicConstraints; case X509ChainStatusFlags.HasNotSupportedCriticalExtension: // This field would be mapped in by AllFlags, but we don't have a name for it currently. return (X509VerificationFlags)0x00002000; case X509ChainStatusFlags.NotTimeNested: return X509VerificationFlags.IgnoreNotTimeNested; } return null; } private static X509ChainStatusFlags MapVerifyErrorToChainStatus(Interop.Crypto.X509VerifyStatusCode code) { switch (code) { case Interop.Crypto.X509VerifyStatusCode.X509_V_OK: return X509ChainStatusFlags.NoError; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CERT_NOT_YET_VALID: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CERT_HAS_EXPIRED: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD: return X509ChainStatusFlags.NotTimeValid; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CERT_REVOKED: return X509ChainStatusFlags.Revoked; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CERT_SIGNATURE_FAILURE: return X509ChainStatusFlags.NotSignatureValid; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CERT_UNTRUSTED: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN: return X509ChainStatusFlags.UntrustedRoot; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CRL_HAS_EXPIRED: return X509ChainStatusFlags.OfflineRevocation; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CRL_NOT_YET_VALID: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CRL_SIGNATURE_FAILURE: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_KEYUSAGE_NO_CRL_SIGN: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_UNABLE_TO_GET_CRL: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION: return X509ChainStatusFlags.RevocationStatusUnknown; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_INVALID_EXTENSION: return X509ChainStatusFlags.InvalidExtension; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT: case 
Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE: return X509ChainStatusFlags.PartialChain; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_INVALID_PURPOSE: return X509ChainStatusFlags.NotValidForUsage; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_INVALID_CA: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_INVALID_NON_CA: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_PATH_LENGTH_EXCEEDED: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_KEYUSAGE_NO_CERTSIGN: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE: return X509ChainStatusFlags.InvalidBasicConstraints; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_INVALID_POLICY_EXTENSION: case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_NO_EXPLICIT_POLICY: return X509ChainStatusFlags.InvalidPolicyConstraints; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CERT_REJECTED: return X509ChainStatusFlags.ExplicitDistrust; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION: return X509ChainStatusFlags.HasNotSupportedCriticalExtension; case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CERT_CHAIN_TOO_LONG: throw new CryptographicException(); case Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_OUT_OF_MEM: throw new OutOfMemoryException(); default: Debug.Fail("Unrecognized X509VerifyStatusCode:" + code); throw new CryptographicException(); } } internal static HashSet<X509Certificate2> FindCandidates( X509Certificate2 leaf, X509Certificate2Collection extraStore, HashSet<X509Certificate2> downloaded, HashSet<X509Certificate2> systemTrusted, ref TimeSpan remainingDownloadTime) { var candidates = new HashSet<X509Certificate2>(); var toProcess = new Queue<X509Certificate2>(); toProcess.Enqueue(leaf); using (var systemRootStore = new X509Store(StoreName.Root, StoreLocation.LocalMachine)) using (var systemIntermediateStore = new X509Store(StoreName.CertificateAuthority, StoreLocation.LocalMachine)) using (var userRootStore = new X509Store(StoreName.Root, StoreLocation.CurrentUser)) using (var userIntermediateStore = new X509Store(StoreName.CertificateAuthority, StoreLocation.CurrentUser)) { systemRootStore.Open(OpenFlags.ReadOnly); systemIntermediateStore.Open(OpenFlags.ReadOnly); userRootStore.Open(OpenFlags.ReadOnly); userIntermediateStore.Open(OpenFlags.ReadOnly); X509Certificate2Collection systemRootCerts = systemRootStore.Certificates; X509Certificate2Collection systemIntermediateCerts = systemIntermediateStore.Certificates; X509Certificate2Collection userRootCerts = userRootStore.Certificates; X509Certificate2Collection userIntermediateCerts = userIntermediateStore.Certificates; // fill the system trusted collection foreach (X509Certificate2 userRootCert in userRootCerts) { systemTrusted.Add(userRootCert); } foreach (X509Certificate2 systemRootCert in systemRootCerts) { systemTrusted.Add(systemRootCert); } // ordering in storesToCheck must match how we read into the systemTrusted collection, that is // first in first checked due to how we eventually use the collection in candidatesByReference X509Certificate2Collection[] storesToCheck = { extraStore, userIntermediateCerts, systemIntermediateCerts, userRootCerts, systemRootCerts, }; while (toProcess.Count > 0) { X509Certificate2 current = toProcess.Dequeue(); candidates.Add(current); HashSet<X509Certificate2> results = FindIssuer( current, storesToCheck, downloaded, ref remainingDownloadTime); if 
(results != null) { foreach (X509Certificate2 result in results) { if (!candidates.Contains(result)) { toProcess.Enqueue(result); } } } } // Avoid sending unused certs into the finalizer queue by doing only a ref check var candidatesByReference = new HashSet<X509Certificate2>( candidates, ReferenceEqualityComparer<X509Certificate2>.Instance); // Dispose any certificates we cloned in, but didn't end up needing. // Since extraStore was provided by users, don't dispose anything it contains. Debug.Assert(storesToCheck.Length > 0, "storesToCheck.Length > 0"); Debug.Assert(storesToCheck[0] == extraStore, "storesToCheck[0] == extraStore"); for (int i = 1; i < storesToCheck.Length; i++) { X509Certificate2Collection collection = storesToCheck[i]; foreach (X509Certificate2 cert in collection) { if (!candidatesByReference.Contains(cert)) { cert.Dispose(); } } } } return candidates; } private static HashSet<X509Certificate2> FindIssuer( X509Certificate2 cert, X509Certificate2Collection[] stores, HashSet<X509Certificate2> downloadedCerts, ref TimeSpan remainingDownloadTime) { if (IsSelfSigned(cert)) { // It's a root cert, we won't make any progress. return null; } SafeX509Handle certHandle = ((OpenSslX509CertificateReader)cert.Pal).SafeHandle; foreach (X509Certificate2Collection store in stores) { HashSet<X509Certificate2> fromStore = null; foreach (X509Certificate2 candidate in store) { SafeX509Handle candidateHandle = ((OpenSslX509CertificateReader)candidate.Pal).SafeHandle; int issuerError = Interop.Crypto.X509CheckIssued(candidateHandle, certHandle); if (issuerError == 0) { if (fromStore == null) { fromStore = new HashSet<X509Certificate2>(); } fromStore.Add(candidate); } } if (fromStore != null) { return fromStore; } } byte[] authorityInformationAccess = null; foreach (X509Extension extension in cert.Extensions) { if (StringComparer.Ordinal.Equals(extension.Oid.Value, Oids.AuthorityInformationAccess)) { // If there's an Authority Information Access extension, it might be used for // looking up additional certificates for the chain. authorityInformationAccess = extension.RawData; break; } } if (authorityInformationAccess != null) { X509Certificate2 downloaded = DownloadCertificate( authorityInformationAccess, ref remainingDownloadTime); if (downloaded != null) { downloadedCerts.Add(downloaded); return new HashSet<X509Certificate2>() { downloaded }; } } return null; } private static bool IsSelfSigned(X509Certificate2 cert) { return StringComparer.Ordinal.Equals(cert.Subject, cert.Issuer); } private static X509Certificate2 DownloadCertificate( byte[] authorityInformationAccess, ref TimeSpan remainingDownloadTime) { // Don't do any work if we're over limit. if (remainingDownloadTime <= TimeSpan.Zero) { return null; } string uri = FindHttpAiaRecord(authorityInformationAccess, Oids.CertificateAuthorityIssuers); if (uri == null) { return null; } return CertificateAssetDownloader.DownloadCertificate(uri, ref remainingDownloadTime); } internal static string FindHttpAiaRecord(byte[] authorityInformationAccess, string recordTypeOid) { DerSequenceReader reader = new DerSequenceReader(authorityInformationAccess); while (reader.HasData) { DerSequenceReader innerReader = reader.ReadSequence(); // If the sequence's first element is a sequence, unwrap it. 
if (innerReader.PeekTag() == ConstructedSequenceTagId) { innerReader = innerReader.ReadSequence(); } Oid oid = innerReader.ReadOid(); if (StringComparer.Ordinal.Equals(oid.Value, recordTypeOid)) { string uri = innerReader.ReadIA5String(); Uri parsedUri; if (!Uri.TryCreate(uri, UriKind.Absolute, out parsedUri)) { continue; } if (!StringComparer.Ordinal.Equals(parsedUri.Scheme, "http")) { continue; } return uri; } } return null; } private class WorkingChain { internal readonly List<List<Interop.Crypto.X509VerifyStatusCode>> Errors = new List<List<Interop.Crypto.X509VerifyStatusCode>>(); internal int VerifyCallback(int ok, IntPtr ctx) { if (ok < 0) { return ok; } try { using (var storeCtx = new SafeX509StoreCtxHandle(ctx, ownsHandle: false)) { Interop.Crypto.X509VerifyStatusCode errorCode = Interop.Crypto.X509StoreCtxGetError(storeCtx); int errorDepth = Interop.Crypto.X509StoreCtxGetErrorDepth(storeCtx); // We don't report "OK" as an error. // For compatibility with Windows / .NET Framework, do not report X509_V_CRL_NOT_YET_VALID. if (errorCode != Interop.Crypto.X509VerifyStatusCode.X509_V_OK && errorCode != Interop.Crypto.X509VerifyStatusCode.X509_V_ERR_CRL_NOT_YET_VALID) { while (Errors.Count <= errorDepth) { Errors.Add(null); } if (Errors[errorDepth] == null) { Errors[errorDepth] = new List<Interop.Crypto.X509VerifyStatusCode>(); } Errors[errorDepth].Add(errorCode); } } return 1; } catch { return -1; } } } } }
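// Hedged usage sketch, separate from the PAL above: the public X509Chain surface that this
// chain processor ultimately backs on Unix. It demonstrates the suppression behaviour that
// Verify/HasUnsuppressedError implement, e.g. AllowUnknownCertificateAuthority covering the
// UntrustedRoot/PartialChain statuses mapped in GetSuppressionFlag. The certificate path is a
// placeholder, and revocation checking is disabled purely to keep the example self-contained.
using System;
using System.Security.Cryptography.X509Certificates;

internal static class ChainVerificationSketch
{
    internal static bool BuildAllowingUnknownRoot(string certificatePath)
    {
        using (var certificate = new X509Certificate2(certificatePath))
        using (var chain = new X509Chain())
        {
            chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
            chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority;

            bool built = chain.Build(certificate);

            // ChainStatus aggregates the per-element statuses collected during the build.
            foreach (X509ChainStatus status in chain.ChainStatus)
            {
                Console.WriteLine($"{status.Status}: {status.StatusInformation}");
            }

            return built;
        }
    }
}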
using System; using System.Collections.Generic; using System.Text; using NUnit.Framework; namespace Codentia.Common.Helper.Test { /// <summary> /// This class contains the unit tests for the static class DateHelper /// </summary> [TestFixture] public class DateHelperTest { /// <summary> /// Scenario: Method called with valid parameters /// Expected: th /// </summary> [Test] public void _001_GetOrdinal_TH() { int[] daysWhichAreTH = new int[] { 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 24, 25, 26, 27, 28, 29, 30 }; for (int i = 0; i < daysWhichAreTH.Length; i++) { Assert.That(DateHelper.GetOrdinal(new DateTime(1900, 1, daysWhichAreTH[i])), Is.EqualTo("th"), "Failed on " + daysWhichAreTH[i]); } } /// <summary> /// Scenario: Method called with valid parameters /// Expected: nd /// </summary> [Test] public void _002_GetOrdinal_ND() { int[] daysWhichAreND = new int[] { 2, 22 }; for (int i = 0; i < daysWhichAreND.Length; i++) { Assert.That(DateHelper.GetOrdinal(new DateTime(1900, 1, daysWhichAreND[i])), Is.EqualTo("nd"), "Failed on " + daysWhichAreND[i]); } } /// <summary> /// Scenario: Method called with valid parameters /// Expected: rd /// </summary> [Test] public void _003_GetOrdinal_RD() { int[] daysWhichAreRD = new int[] { 3, 23 }; for (int i = 0; i < daysWhichAreRD.Length; i++) { Assert.That(DateHelper.GetOrdinal(new DateTime(1900, 1, daysWhichAreRD[i])), Is.EqualTo("rd"), "Failed on " + daysWhichAreRD[i]); } } /// <summary> /// Scenario: Method called with valid parameters /// Expected: st /// </summary> [Test] public void _004_GetOrdinal_ST() { int[] daysWhichAreST = new int[] { 1, 21, 31 }; for (int i = 0; i < daysWhichAreST.Length; i++) { Assert.That(DateHelper.GetOrdinal(new DateTime(1900, 1, daysWhichAreST[i])), Is.EqualTo("st"), "Failed on " + daysWhichAreST[i]); } } /// <summary> /// Scenario: Method called with valid parameters, day is a single digit /// Expected: Unpadded day with ordinal e.g 2nd /// </summary> [Test] public void _005_GetDayOfMonthWithOrdinal_SingleDigit() { Assert.That(DateHelper.GetDayOfMonthWithOrdinal(new DateTime(1900, 1, 1)), Is.EqualTo("1st")); } /// <summary> /// Scenario: Method called with valid parameters, day is double digit /// Expected: Unpadded day with ordinal e.g 2nd /// </summary> [Test] public void _006_GetDayOfMonthWithOrdinal_DoubleDigit() { Assert.That(DateHelper.GetDayOfMonthWithOrdinal(new DateTime(1900, 1, 12)), Is.EqualTo("12th")); } /// <summary> /// Scenario: Call DayOfWeekArray /// Expected: Runs successfully /// </summary> [Test] public void _007_DayOfWeekArray() { DayOfWeek[] dowArray = DateHelper.DayOfWeekArray(); Assert.That(dowArray[0], Is.EqualTo(DayOfWeek.Sunday)); Assert.That(dowArray[1], Is.EqualTo(DayOfWeek.Monday)); Assert.That(dowArray[2], Is.EqualTo(DayOfWeek.Tuesday)); Assert.That(dowArray[3], Is.EqualTo(DayOfWeek.Wednesday)); Assert.That(dowArray[4], Is.EqualTo(DayOfWeek.Thursday)); Assert.That(dowArray[5], Is.EqualTo(DayOfWeek.Friday)); Assert.That(dowArray[6], Is.EqualTo(DayOfWeek.Saturday)); } /// <summary> /// Scenario: Call DayOfWeekArray with optional argument /// Expected: Produces the correct list of days /// </summary> [Test] public void _007_DayOfWeekArray_WithStartDay() { // week starts monday DayOfWeek[] dowArray = DateHelper.DayOfWeekArray(DayOfWeek.Monday); Assert.That(dowArray.Length, Is.EqualTo(7)); Assert.That(dowArray[0], Is.EqualTo(DayOfWeek.Monday)); Assert.That(dowArray[1], Is.EqualTo(DayOfWeek.Tuesday)); Assert.That(dowArray[2], Is.EqualTo(DayOfWeek.Wednesday)); 
Assert.That(dowArray[3], Is.EqualTo(DayOfWeek.Thursday)); Assert.That(dowArray[4], Is.EqualTo(DayOfWeek.Friday)); Assert.That(dowArray[5], Is.EqualTo(DayOfWeek.Saturday)); Assert.That(dowArray[6], Is.EqualTo(DayOfWeek.Sunday)); // week starts thursday dowArray = DateHelper.DayOfWeekArray(DayOfWeek.Thursday); Assert.That(dowArray.Length, Is.EqualTo(7)); Assert.That(dowArray[0], Is.EqualTo(DayOfWeek.Thursday)); Assert.That(dowArray[1], Is.EqualTo(DayOfWeek.Friday)); Assert.That(dowArray[2], Is.EqualTo(DayOfWeek.Saturday)); Assert.That(dowArray[3], Is.EqualTo(DayOfWeek.Sunday)); Assert.That(dowArray[4], Is.EqualTo(DayOfWeek.Monday)); Assert.That(dowArray[5], Is.EqualTo(DayOfWeek.Tuesday)); Assert.That(dowArray[6], Is.EqualTo(DayOfWeek.Wednesday)); } /// <summary> /// Scenario: Call GetTimeSpanAsString /// Expected: string with correct format /// </summary> [Test] public void _008_GetTimeSpanAsString() { // single digits are padded TimeSpan ts = new TimeSpan(0, 0, 5); Assert.That(DateHelper.GetTimeSpanAsString(ts), Is.EqualTo("00:00:00:05")); ts = new TimeSpan(0, 2, 0); Assert.That(DateHelper.GetTimeSpanAsString(ts), Is.EqualTo("00:00:02:00")); ts = new TimeSpan(4, 0, 0); Assert.That(DateHelper.GetTimeSpanAsString(ts), Is.EqualTo("00:04:00:00")); ts = new TimeSpan(9, 0, 0, 0); Assert.That(DateHelper.GetTimeSpanAsString(ts), Is.EqualTo("09:00:00:00")); // double digits ts = new TimeSpan(10, 23, 59, 12); Assert.That(DateHelper.GetTimeSpanAsString(ts), Is.EqualTo("10:23:59:12")); } /// <summary> /// Scenario: Call GetStringAsTimeSpan with badly formatted timespan string /// Expected: Exception /// </summary> [Test] public void _009_GetStringAsTimeSpan_InvalidString() { Assert.That(delegate { DateHelper.GetStringAsTimeSpan("absdghg"); }, Throws.InstanceOf<InvalidCastException>().With.Message.EqualTo("Unable to cast: absdghg as timespan - format is days:hours:mins:secs")); } /// <summary> /// Scenario: Call GetStringAsTimeSpan with correctly formatted timespan string /// Expected: Timespan returned /// </summary> [Test] public void _010_GetStringAsTimeSpan_ValidString() { TimeSpan ts = new TimeSpan(10, 23, 59, 12); string timeString = DateHelper.GetTimeSpanAsString(ts); TimeSpan ts2 = DateHelper.GetStringAsTimeSpan(timeString); Assert.That(ts2, Is.EqualTo(ts)); // test object overload object timeStringObject = DateHelper.GetTimeSpanAsString(ts); TimeSpan ts3 = DateHelper.GetStringAsTimeSpan(timeStringObject); Assert.That(ts3, Is.EqualTo(ts)); } /// <summary> /// Scenario: Call WriteNullableDate with DateTime.MinValue and DateTime.Now /// Expected: Returns DBNull.Value for DateTime.MinValue and DateTime.Now /// </summary> [Test] public void _011_WriteNullableDate() { Assert.That(DateHelper.WriteNullableDate(DateTime.MinValue), Is.EqualTo(DBNull.Value)); DateTime dte = DateTime.Now; Assert.That(DateHelper.WriteNullableDate(dte), Is.EqualTo(dte)); } /// <summary> /// Scenario: Call GetNullableDate with DBNull.Value and DateTime.Now /// Expected: Returns DateTime.MinValue for DBNull.Value and DateTime.Now /// </summary> [Test] public void _012_GetNullableDate() { Assert.That(DateHelper.GetNullableDate(DBNull.Value), Is.EqualTo(DateTime.MinValue)); DateTime dte = DateTime.Now; Assert.That(DateHelper.GetNullableDate(dte), Is.EqualTo(dte)); string dteString = "08 April 2012"; DateTime dtecheck = Convert.ToDateTime(dteString); Assert.That(DateHelper.GetNullableDate(dteString), Is.EqualTo(dtecheck)); } /// <summary> /// Scenario: Call GetDayOfWeekArrayAsByte - all Days of Week /// Expected: Returns 127 /// 
</summary> [Test] public void _013_GetDayOfWeekArrayAsByte_AllDays() { List<DayOfWeek> days = new List<DayOfWeek>(); days.Add(DayOfWeek.Sunday); days.Add(DayOfWeek.Monday); days.Add(DayOfWeek.Tuesday); days.Add(DayOfWeek.Wednesday); days.Add(DayOfWeek.Thursday); days.Add(DayOfWeek.Friday); days.Add(DayOfWeek.Saturday); Assert.That(DateHelper.GetDayOfWeekArrayAsByte(days.ToArray()), Is.EqualTo(127)); } /// <summary> /// Scenario: Call GetDayOfWeekArrayAsByte - various Days of Week /// Expected: Returns correct byte values /// </summary> [Test] public void _014_GetDayOfWeekArrayAsByte_VariousDays() { List<DayOfWeek> days = new List<DayOfWeek>(); days.Add(DayOfWeek.Sunday); Assert.That(DateHelper.GetDayOfWeekArrayAsByte(days.ToArray()), Is.EqualTo(1)); days.Clear(); days.Add(DayOfWeek.Monday); days.Add(DayOfWeek.Tuesday); Assert.That(DateHelper.GetDayOfWeekArrayAsByte(days.ToArray()), Is.EqualTo(6)); days.Clear(); days.Add(DayOfWeek.Monday); days.Add(DayOfWeek.Wednesday); days.Add(DayOfWeek.Friday); Assert.That(DateHelper.GetDayOfWeekArrayAsByte(days.ToArray()), Is.EqualTo(42)); } /// <summary> /// Scenario: Call GetDayOfWeekArrayAsByte - negative tests /// Expected: Exceptions /// </summary> [Test] public void _015_GetDayOfWeekArrayFromByte_NegativeTests() { Assert.That(delegate { DateHelper.GetDayOfWeekArrayFromByte(0); }, Throws.InstanceOf<ArgumentException>().With.Message.EqualTo("dayofWeekByteValue: 0 is not valid")); Assert.That(delegate { DateHelper.GetDayOfWeekArrayFromByte(128); }, Throws.InstanceOf<ArgumentException>().With.Message.EqualTo("dayofWeekByteValue: 128 cannot be greater than 127")); } /// <summary> /// Scenario: Call GetDayOfWeekArrayAsByte - negative tests /// Expected: Exceptions /// </summary> [Test] public void _016_GetDayOfWeekArrayFromByte() { DayOfWeek[] days = DateHelper.GetDayOfWeekArrayFromByte(1); Assert.That(days.Length, Is.EqualTo(1)); Assert.That(days[0], Is.EqualTo(DayOfWeek.Sunday)); days = DateHelper.GetDayOfWeekArrayFromByte(34); Assert.That(days.Length, Is.EqualTo(2)); Assert.That(days[0], Is.EqualTo(DayOfWeek.Monday)); Assert.That(days[1], Is.EqualTo(DayOfWeek.Friday)); days = DateHelper.GetDayOfWeekArrayFromByte(100); Assert.That(days.Length, Is.EqualTo(3)); Assert.That(days[0], Is.EqualTo(DayOfWeek.Tuesday)); Assert.That(days[1], Is.EqualTo(DayOfWeek.Friday)); Assert.That(days[2], Is.EqualTo(DayOfWeek.Saturday)); days = DateHelper.GetDayOfWeekArrayFromByte(127); Assert.That(days.Length, Is.EqualTo(7)); Assert.That(days[0], Is.EqualTo(DayOfWeek.Sunday)); Assert.That(days[1], Is.EqualTo(DayOfWeek.Monday)); Assert.That(days[2], Is.EqualTo(DayOfWeek.Tuesday)); Assert.That(days[3], Is.EqualTo(DayOfWeek.Wednesday)); Assert.That(days[4], Is.EqualTo(DayOfWeek.Thursday)); Assert.That(days[5], Is.EqualTo(DayOfWeek.Friday)); Assert.That(days[6], Is.EqualTo(DayOfWeek.Saturday)); } /// <summary> /// Scenario: Call GetDayOfWeekFromByte - invalid byte value /// Expected: Exception /// </summary> [Test] public void _017_GetDayOfWeekFromByte_InvalidByteValue() { Assert.That(delegate { DateHelper.GetDayOfWeekFromByte(200); }, Throws.ArgumentException.With.Message.EqualTo("Invalid byte value: 200 for dayofWeekValue")); } /// <summary> /// Scenario: Call GetDayOfWeekFromByte /// Expected: Success /// </summary> [Test] public void _018_GetDayOfWeekFromByte_AllValues() { Assert.That(DateHelper.GetDayOfWeekFromByte(1), Is.EqualTo(DayOfWeek.Sunday)); Assert.That(DateHelper.GetDayOfWeekFromByte(2), Is.EqualTo(DayOfWeek.Monday)); 
Assert.That(DateHelper.GetDayOfWeekFromByte(4), Is.EqualTo(DayOfWeek.Tuesday)); Assert.That(DateHelper.GetDayOfWeekFromByte(8), Is.EqualTo(DayOfWeek.Wednesday)); Assert.That(DateHelper.GetDayOfWeekFromByte(16), Is.EqualTo(DayOfWeek.Thursday)); Assert.That(DateHelper.GetDayOfWeekFromByte(32), Is.EqualTo(DayOfWeek.Friday)); Assert.That(DateHelper.GetDayOfWeekFromByte(64), Is.EqualTo(DayOfWeek.Saturday)); } } }
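// Hedged sketch, not the Codentia DateHelper implementation (which is not shown in this file):
// it only illustrates the bit encoding the tests above assert, where Sunday = 1, Monday = 2,
// Tuesday = 4, ..., Saturday = 64; so { Monday, Tuesday } => 6, { Monday, Wednesday, Friday } => 42,
// and all seven days => 127. The type and method names are illustrative.
using System;

internal static class DayOfWeekMaskSketch
{
    internal static byte ToMask(params DayOfWeek[] days)
    {
        int mask = 0;
        foreach (DayOfWeek day in days)
        {
            // DayOfWeek.Sunday == 0, so each day occupies bit (int)day.
            mask |= 1 << (int)day;
        }
        return (byte)mask;
    }
}
// Example: ToMask(DayOfWeek.Monday, DayOfWeek.Wednesday, DayOfWeek.Friday) == 42.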
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. #nullable disable using System; using System.Diagnostics; using System.Linq; using System.Reflection; using System.Reflection.Emit; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; namespace Microsoft.AspNetCore.Routing.Matching { internal static class ILEmitTrieFactory { // The algorthm we use only works for ASCII text. If we find non-ASCII text in the input // we need to reject it and let is be processed with a fallback technique. public const int NotAscii = int.MinValue; // Creates a Func of (string path, int start, int length) => destination // Not using PathSegment here because we don't want to mess with visibility checks and // generating IL without it is easier. public static Func<string, int, int, int> Create( int defaultDestination, int exitDestination, (string text, int destination)[] entries, bool? vectorize) { var method = new DynamicMethod( "GetDestination", typeof(int), new[] { typeof(string), typeof(int), typeof(int), }); GenerateMethodBody(method.GetILGenerator(), defaultDestination, exitDestination, entries, vectorize); #if IL_EMIT_SAVE_ASSEMBLY SaveAssembly(method.GetILGenerator(), defaultDestination, exitDestination, entries, vectorize); #endif return (Func<string, int, int, int>)method.CreateDelegate(typeof(Func<string, int, int, int>)); } // Internal for testing internal static bool ShouldVectorize((string text, int destination)[] entries) { // There's no value in vectorizing the computation if we're on 32bit or // if no string is long enough. We do the vectorized comparison with uint64 ulongs // which isn't beneficial if they don't map to the native size of the CPU. The // vectorized algorithm introduces additional overhead for casing. // Vectorize by default on 64bit (allow override for testing) return (IntPtr.Size == 8) && // Don't vectorize if all of the strings are small (prevents allocating unused locals) entries.Any(e => e.text.Length >= 4); } private static void GenerateMethodBody( ILGenerator il, int defaultDestination, int exitDestination, (string text, int destination)[] entries, bool? vectorize) { vectorize = vectorize ?? ShouldVectorize(entries); // See comments on Locals for details var locals = new Locals(il, vectorize.Value); // See comments on Labels for details var labels = new Labels() { ReturnDefault = il.DefineLabel(), ReturnNotAscii = il.DefineLabel(), }; // See comments on Methods for details var methods = Methods.Instance; // Initializing top-level locals - this is similar to... // ReadOnlySpan<char> span = arg0.AsSpan(arg1, arg2); // ref byte p = ref Unsafe.As<char, byte>(MemoryMarshal.GetReference<char>(span)) // arg0.AsSpan(arg1, arg2) il.Emit(OpCodes.Ldarg_0); il.Emit(OpCodes.Ldarg_1); il.Emit(OpCodes.Ldarg_2); il.Emit(OpCodes.Call, methods.AsSpan); // ReadOnlySpan<char> = ... il.Emit(OpCodes.Stloc, locals.Span); // MemoryMarshal.GetReference<char>(span) il.Emit(OpCodes.Ldloc, locals.Span); il.Emit(OpCodes.Call, methods.GetReference); // Unsafe.As<char, byte>(...) il.Emit(OpCodes.Call, methods.As); // ref byte p = ... il.Emit(OpCodes.Stloc_0, locals.P); var groups = entries.GroupBy(e => e.text.Length).ToArray(); for (var i = 0; i < groups.Length; i++) { var group = groups[i]; // Similar to 'if (length != X) { ... 
} var inside = il.DefineLabel(); var next = il.DefineLabel(); il.Emit(OpCodes.Ldarg_2); il.Emit(OpCodes.Ldc_I4, group.Key); il.Emit(OpCodes.Beq, inside); il.Emit(OpCodes.Br, next); // Process the group il.MarkLabel(inside); EmitTable(il, group.ToArray(), 0, group.Key, locals, labels, methods); il.MarkLabel(next); } // Exit point - we end up here when the text doesn't match il.MarkLabel(labels.ReturnDefault); il.Emit(OpCodes.Ldc_I4, defaultDestination); il.Emit(OpCodes.Ret); // Exit point - we end up here with the text contains non-ASCII text il.MarkLabel(labels.ReturnNotAscii); il.Emit(OpCodes.Ldc_I4, NotAscii); il.Emit(OpCodes.Ret); } private static void EmitTable( ILGenerator il, (string text, int destination)[] entries, int index, int length, Locals locals, Labels labels, Methods methods) { // We've reached the end of the string. if (index == length) { EmitReturnDestination(il, entries); return; } // If 4 or more characters remain, and we're vectorizing, we should process 4 characters at a time. if (length - index >= 4 && locals.UInt64Value != null) { EmitVectorizedTable(il, entries, index, length, locals, labels, methods); return; } // Fall back to processing a character at a time. EmitSingleCharacterTable(il, entries, index, length, locals, labels, methods); } private static void EmitVectorizedTable( ILGenerator il, (string text, int destination)[] entries, int index, int length, Locals locals, Labels labels, Methods methods) { // Emits code similar to: // // uint64Value = Unsafe.ReadUnaligned<ulong>(ref p); // p = ref Unsafe.Add(ref p, 8); // // if ((uint64Value & ~0x007F007F007F007FUL) == 0) // { // return NotAscii; // } // uint64LowerIndicator = value + (0x0080008000800080UL - 0x0041004100410041UL); // uint64UpperIndicator = value + (0x0080008000800080UL - 0x005B005B005B005BUL); // ulong temp1 = uint64LowerIndicator ^ uint64UpperIndicator // ulong temp2 = temp1 & 0x0080008000800080UL; // ulong temp3 = (temp2) >> 2; // uint64Value = uint64Value ^ temp3; // // This is a vectorized non-branching technique for processing 4 utf16 characters // at a time inside a single uint64. // // Similar to: // https://github.com/GrabYourPitchforks/coreclr/commit/a3c1df25c4225995ffd6b18fd0fc39d6b81fd6a5#diff-d89b6ca07ea349899e45eed5f688a7ebR81 // // Basically we need to check if the text is non-ASCII first and bail if it is. // The rest of the steps will convert the text to lowercase by checking all characters // at a time to see if they are in the A-Z range, that's where 0x0041 and 0x005B come in. // IMPORTANT // // If you are modifying this code, be aware that the easiest way to make a mistake is by // getting the set of casts wrong doing something like: // // il.Emit(OpCodes.Ldc_I8, ~0x007F007F007F007FUL); // // The IL Emit apis don't have overloads that accept ulong or ushort, and will resolve // an overload that does an undesirable conversion (for instance converting ulong to float). // // IMPORTANT // Unsafe.ReadUnaligned<ulong>(ref p) il.Emit(OpCodes.Ldloc, locals.P); il.Emit(OpCodes.Call, methods.ReadUnalignedUInt64); // uint64Value = ... il.Emit(OpCodes.Stloc, locals.UInt64Value); // Unsafe.Add(ref p, 8) il.Emit(OpCodes.Ldloc, locals.P); il.Emit(OpCodes.Ldc_I4, 8); // 8 bytes were read il.Emit(OpCodes.Call, methods.Add); // p = ref ... 
il.Emit(OpCodes.Stloc, locals.P); // if ((uint64Value & ~0x007F007F007F007FUL) == 0) // { // goto: NotAscii; // } il.Emit(OpCodes.Ldloc, locals.UInt64Value); il.Emit(OpCodes.Ldc_I8, unchecked((long)~0x007F007F007F007FUL)); il.Emit(OpCodes.And); il.Emit(OpCodes.Brtrue, labels.ReturnNotAscii); // uint64Value + (0x0080008000800080UL - 0x0041004100410041UL) il.Emit(OpCodes.Ldloc, locals.UInt64Value); il.Emit(OpCodes.Ldc_I8, unchecked((long)(0x0080008000800080UL - 0x0041004100410041UL))); il.Emit(OpCodes.Add); // uint64LowerIndicator = ... il.Emit(OpCodes.Stloc, locals.UInt64LowerIndicator); // value + (0x0080008000800080UL - 0x005B005B005B005BUL) il.Emit(OpCodes.Ldloc, locals.UInt64Value); il.Emit(OpCodes.Ldc_I8, unchecked((long)(0x0080008000800080UL - 0x005B005B005B005BUL))); il.Emit(OpCodes.Add); // uint64UpperIndicator = ... il.Emit(OpCodes.Stloc, locals.UInt64UpperIndicator); // ulongLowerIndicator ^ ulongUpperIndicator il.Emit(OpCodes.Ldloc, locals.UInt64LowerIndicator); il.Emit(OpCodes.Ldloc, locals.UInt64UpperIndicator); il.Emit(OpCodes.Xor); // ... & 0x0080008000800080UL il.Emit(OpCodes.Ldc_I8, unchecked((long)0x0080008000800080UL)); il.Emit(OpCodes.And); // ... >> 2; il.Emit(OpCodes.Ldc_I4, 2); il.Emit(OpCodes.Shr_Un); // ... ^ uint64Value il.Emit(OpCodes.Ldloc, locals.UInt64Value); il.Emit(OpCodes.Xor); // uint64Value = ... il.Emit(OpCodes.Stloc, locals.UInt64Value); // Now we generate an 'if' ladder with an entry for each of the unique 64 bit sections // of the text. var groups = entries.GroupBy(e => GetUInt64Key(e.text, index)); foreach (var group in groups) { // if (uint64Value == 0x.....) { ... } var next = il.DefineLabel(); il.Emit(OpCodes.Ldloc, locals.UInt64Value); il.Emit(OpCodes.Ldc_I8, unchecked((long)group.Key)); il.Emit(OpCodes.Bne_Un, next); // Process the group EmitTable(il, group.ToArray(), index + 4, length, locals, labels, methods); il.MarkLabel(next); } // goto: defaultDestination il.Emit(OpCodes.Br, labels.ReturnDefault); } private static void EmitSingleCharacterTable( ILGenerator il, (string text, int destination)[] entries, int index, int length, Locals locals, Labels labels, Methods methods) { // See the vectorized code path for a much more thorough explanation. // IMPORTANT // // If you are modifying this code, be aware that the easiest way to make a mistake is by // getting the set of casts wrong doing something like: // // il.Emit(OpCodes.Ldc_I4, ~0x007F); // // The IL Emit apis don't have overloads that accept ulong or ushort, and will resolve // an overload that does an undesirable conversion (for instance convering ulong to float). // // IMPORTANT // Unsafe.ReadUnaligned<ushort>(ref p) il.Emit(OpCodes.Ldloc, locals.P); il.Emit(OpCodes.Call, methods.ReadUnalignedUInt16); // uint16Value = ... il.Emit(OpCodes.Stloc, locals.UInt16Value); // Unsafe.Add(ref p, 2) il.Emit(OpCodes.Ldloc, locals.P); il.Emit(OpCodes.Ldc_I4, 2); // 2 bytes were read il.Emit(OpCodes.Call, methods.Add); // p = ref ... il.Emit(OpCodes.Stloc, locals.P); // if ((uInt16Value & ~0x007FUL) == 0) // { // goto: NotAscii; // } il.Emit(OpCodes.Ldloc, locals.UInt16Value); il.Emit(OpCodes.Ldc_I4, unchecked((int)((uint)~0x007F))); il.Emit(OpCodes.And); il.Emit(OpCodes.Brtrue, labels.ReturnNotAscii); // Since we're handling a single character at a time, it's easier to just // generate an 'if' with two comparisons instead of doing complicated conversion // logic. // Now we generate an 'if' ladder with an entry for each of the unique // characters in the group. 
var groups = entries.GroupBy(e => GetUInt16Key(e.text, index)); foreach (var group in groups) { // if (uInt16Value == 'A' || uint16Value == 'a') { ... } var next = il.DefineLabel(); var inside = il.DefineLabel(); il.Emit(OpCodes.Ldloc, locals.UInt16Value); il.Emit(OpCodes.Ldc_I4, unchecked((int)((uint)group.Key))); il.Emit(OpCodes.Beq, inside); var upper = (ushort)char.ToUpperInvariant((char)group.Key); if (upper != group.Key) { il.Emit(OpCodes.Ldloc, locals.UInt16Value); il.Emit(OpCodes.Ldc_I4, unchecked((int)((uint)upper))); il.Emit(OpCodes.Beq, inside); } il.Emit(OpCodes.Br, next); // Process the group il.MarkLabel(inside); EmitTable(il, group.ToArray(), index + 1, length, locals, labels, methods); il.MarkLabel(next); } // goto: defaultDestination il.Emit(OpCodes.Br, labels.ReturnDefault); } public static void EmitReturnDestination(ILGenerator il, (string text, int destination)[] entries) { Debug.Assert(entries.Length == 1, "We should have a single entry"); il.Emit(OpCodes.Ldc_I4, entries[0].destination); il.Emit(OpCodes.Ret); } private static ulong GetUInt64Key(string text, int index) { Debug.Assert(index + 4 <= text.Length); var span = text.ToLowerInvariant().AsSpan(index); ref var p = ref Unsafe.As<char, byte>(ref MemoryMarshal.GetReference(span)); return Unsafe.ReadUnaligned<ulong>(ref p); } private static ushort GetUInt16Key(string text, int index) { Debug.Assert(index + 1 <= text.Length); return (ushort)char.ToLowerInvariant(text[index]); } // We require a special build-time define since this is a testing/debugging // feature that will litter the app directory with assemblies. #if IL_EMIT_SAVE_ASSEMBLY private static void SaveAssembly( int defaultDestination, int exitDestination, (string text, int destination)[] entries, bool? vectorize) { var assemblyName = "Microsoft.AspNetCore.Routing.ILEmitTrie" + DateTime.Now.Ticks; var fileName = assemblyName + ".dll"; var assembly = AssemblyBuilder.DefineDynamicAssembly(new AssemblyName(assemblyName), AssemblyBuilderAccess.RunAndSave); var module = assembly.DefineDynamicModule(assemblyName, fileName); var type = module.DefineType("ILEmitTrie"); var method = type.DefineMethod( "GetDestination", MethodAttributes.Public | MethodAttributes.Static, CallingConventions.Standard, typeof(int), new [] { typeof(string), typeof(int), typeof(int), }); GenerateMethodBody(method.GetILGenerator(), defaultDestination, exitDestination, entries, vectorize); type.CreateTypeInfo(); assembly.Save(fileName); } #endif private class Locals { public Locals(ILGenerator il, bool vectorize) { P = il.DeclareLocal(typeof(byte).MakeByRefType()); Span = il.DeclareLocal(typeof(ReadOnlySpan<char>)); UInt16Value = il.DeclareLocal(typeof(ushort)); if (vectorize) { UInt64Value = il.DeclareLocal(typeof(ulong)); UInt64LowerIndicator = il.DeclareLocal(typeof(ulong)); UInt64UpperIndicator = il.DeclareLocal(typeof(ulong)); } } /// <summary> /// Holds current character when processing a character at a time. /// </summary> public LocalBuilder UInt16Value { get; } /// <summary> /// Holds current character when processing 4 characters at a time. /// </summary> public LocalBuilder UInt64Value { get; } /// <summary> /// Used to convert casing. See comments where it's used. /// </summary> public LocalBuilder UInt64LowerIndicator { get; } /// <summary> /// Used to convert casing. See comments where it's used. /// </summary> public LocalBuilder UInt64UpperIndicator { get; } /// <summary> /// Holds a 'ref byte' reference to the current character (in bytes).
/// </summary> public LocalBuilder P { get; } /// <summary> /// Holds the relevant portion of the path as a Span[byte]. /// </summary> public LocalBuilder Span { get; } } private class Labels { /// <summary> /// Label to goto that will return the default destination (not a match). /// </summary> public Label ReturnDefault { get; set; } /// <summary> /// Label to goto that will return a sentinel value for non-ascii text. /// </summary> public Label ReturnNotAscii { get; set; } } private class Methods { // Caching because the methods won't change, if we're being called once we're likely to // be called again. public static readonly Methods Instance = new Methods(); private Methods() { // Can't use GetMethod because the parameter is a generic method parameter. Add = typeof(Unsafe) .GetMethods(BindingFlags.Public | BindingFlags.Static) .Where(m => m.Name == nameof(Unsafe.Add)) .Where(m => m.GetGenericArguments().Length == 1) .Where(m => m.GetParameters().Length == 2) .FirstOrDefault() ?.MakeGenericMethod(typeof(byte)); if (Add == null) { throw new InvalidOperationException("Failed to find Unsafe.Add{T}(ref T, int)"); } // Can't use GetMethod because the parameter is a generic method parameter. As = typeof(Unsafe) .GetMethods(BindingFlags.Public | BindingFlags.Static) .Where(m => m.Name == nameof(Unsafe.As)) .Where(m => m.GetGenericArguments().Length == 2) .Where(m => m.GetParameters().Length == 1) .FirstOrDefault() ?.MakeGenericMethod(typeof(char), typeof(byte)); if (As == null) { throw new InvalidOperationException("Failed to find Unsafe.As{TFrom, TTo}(ref TFrom)"); } AsSpan = typeof(MemoryExtensions).GetMethod( nameof(MemoryExtensions.AsSpan), BindingFlags.Public | BindingFlags.Static, binder: null, new[] { typeof(string), typeof(int), typeof(int), }, modifiers: null); if (AsSpan == null) { throw new InvalidOperationException("Failed to find MemoryExtensions.AsSpan(string, int, int)"); } // Can't use GetMethod because the parameter is a generic method parameter. GetReference = typeof(MemoryMarshal) .GetMethods(BindingFlags.Public | BindingFlags.Static) .Where(m => m.Name == nameof(MemoryMarshal.GetReference)) .Where(m => m.GetGenericArguments().Length == 1) .Where(m => m.GetParameters().Length == 1) // Disambiguate between ReadOnlySpan<> and Span<> - this method is overloaded.
.Where(m => m.GetParameters()[0].ParameterType.GetGenericTypeDefinition() == typeof(ReadOnlySpan<>)) .FirstOrDefault() ?.MakeGenericMethod(typeof(char)); if (GetReference == null) { throw new InvalidOperationException("Failed to find MemoryMarshal.GetReference{T}(ReadOnlySpan{T})"); } ReadUnalignedUInt64 = typeof(Unsafe).GetMethod( nameof(Unsafe.ReadUnaligned), BindingFlags.Public | BindingFlags.Static, binder: null, new[] { typeof(byte).MakeByRefType(), }, modifiers: null) .MakeGenericMethod(typeof(ulong)); if (ReadUnalignedUInt64 == null) { throw new InvalidOperationException("Failed to find Unsafe.ReadUnaligned{T}(ref byte)"); } ReadUnalignedUInt16 = typeof(Unsafe).GetMethod( nameof(Unsafe.ReadUnaligned), BindingFlags.Public | BindingFlags.Static, binder: null, new[] { typeof(byte).MakeByRefType(), }, modifiers: null) .MakeGenericMethod(typeof(ushort)); if (ReadUnalignedUInt16 == null) { throw new InvalidOperationException("Failed to find Unsafe.ReadUnaligned{T}(ref byte)"); } } /// <summary> /// <see cref="Unsafe.Add{T}(ref T, int)"/> - Add[ref byte] /// </summary> public MethodInfo Add { get; } /// <summary> /// <see cref="Unsafe.As{TFrom, TTo}(ref TFrom)"/> - As[char, byte] /// </summary> public MethodInfo As { get; } /// <summary> /// <see cref="MemoryExtensions.AsSpan(string, int, int)"/> /// </summary> public MethodInfo AsSpan { get; } /// <summary> /// <see cref="MemoryMarshal.GetReference{T}(ReadOnlySpan{T})"/> - GetReference[char] /// </summary> public MethodInfo GetReference { get; } /// <summary> /// <see cref="Unsafe.ReadUnaligned{T}(ref byte)"/> - ReadUnaligned[ulong] /// </summary> public MethodInfo ReadUnalignedUInt64 { get; } /// <summary> /// <see cref="Unsafe.ReadUnaligned{T}(ref byte)"/> - ReadUnaligned[ushort] /// </summary> public MethodInfo ReadUnalignedUInt16 { get; } } } }
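// Hedged sketch in plain C# (no IL emission), separate from the factory above: the same
// branch-free trick the emitted code uses to reject non-ASCII input and lowercase four UTF-16
// characters at a time inside one ulong. The type and method names are illustrative only.
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

internal static class VectorizedLowercaseSketch
{
    // Returns null when any of the four characters is non-ASCII (the emitted code's NotAscii path).
    internal static ulong? ToLowerAsciiBlock(ReadOnlySpan<char> fourChars)
    {
        if (fourChars.Length < 4)
        {
            throw new ArgumentException("Need at least four characters.", nameof(fourChars));
        }

        ref byte p = ref Unsafe.As<char, byte>(ref MemoryMarshal.GetReference(fourChars));
        ulong value = Unsafe.ReadUnaligned<ulong>(ref p);

        // Any bit above 0x007F in any 16-bit lane means the text is not ASCII.
        if ((value & ~0x007F007F007F007FUL) != 0)
        {
            return null;
        }

        // Lanes holding 'A'..'Z' (0x0041..0x005A) end up with 0x0080 set in exactly one of the
        // two indicators; XOR isolates those lanes, and shifting 0x0080 right by 2 produces the
        // 0x0020 bit that flips an uppercase ASCII letter to lowercase.
        ulong lowerIndicator = value + (0x0080008000800080UL - 0x0041004100410041UL);
        ulong upperIndicator = value + (0x0080008000800080UL - 0x005B005B005B005BUL);
        ulong caseBits = ((lowerIndicator ^ upperIndicator) & 0x0080008000800080UL) >> 2;

        return value ^ caseBits;
    }
}
// Example: ToLowerAsciiBlock("GeT!".AsSpan()) equals reading "get!" the same way.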
using System; using System.ComponentModel; using System.Drawing; using System.Drawing.Design; using System.Runtime.Serialization; using System.Xml.Linq; using BASeCamp.BASeBlock.Particles; using BASeCamp.Elementizer; namespace BASeCamp.BASeBlock.Blocks { [Serializable()] [ImpactEffectBlockCategory] [BlockDescription("Shoots a temporary ball in a given direction each time it is hit.")] public class RayBlock : ImageBlock { public enum RayFireDirection { Fire_Left, Fire_Up, Fire_Right, Fire_Down, } protected Type _ShootProjectileType = typeof(ProjectileBall); private String _ShootProjectileTypeString { get { return _ShootProjectileType.Name; } set { _ShootProjectileType = BCBlockGameState.FindClass(value); } } [Editor(typeof(ItemTypeEditor<iProjectile>),typeof(UITypeEditor))] public Type ShootProjectileType { get { return _ShootProjectileType; } set { _ShootProjectileType = value; } } public RayFireDirection mFireDirection = RayFireDirection.Fire_Up; private float LaunchVelocity = 3; /// <summary> /// Position from which to launch projectiles. relative to the block. defaults to top-center. /// </summary> private PointF LaunchPosition; public RayBlock(RectangleF blockrect, PointF LaunchVelocity, PointF LaunchPosition) : base(blockrect, "rayblock") { } public RayBlock(SerializationInfo info, StreamingContext context) : base(info, context) { //custom added code here... //info.AddValue("LaunchVelocity", LaunchVelocity); //info.AddValue("LaunchPosition", LaunchPosition); mFireDirection = (RayFireDirection)info.GetValue("FireDirection", typeof(RayFireDirection)); LaunchPosition = (PointF)info.GetValue("LaunchPosition", typeof(PointF)); LaunchVelocity = info.GetSingle("LaunchVelocity"); try { _ShootProjectileTypeString = info.GetString("ShootProjectileType"); } catch { _ShootProjectileType = typeof(ProjectileBall); } } public RayBlock(XElement Source, Object pPersistenceData) :base(Source,pPersistenceData) { mFireDirection = (RayFireDirection) Source.GetAttributeInt("FireDirection"); LaunchVelocity = Source.GetAttributeFloat("LaunchVelocity"); LaunchPosition = (PointF)Source.ReadElement<PointF>("LaunchPosition"); try { _ShootProjectileTypeString = Source.GetAttributeString("ShootProjectileType"); } catch(Exception exx) { _ShootProjectileType = typeof(ProjectileBall); } } public override XElement GetXmlData(String pNodeName,Object pPersistenceData) { var result = base.GetXmlData(pNodeName,pPersistenceData); result.Add(new XAttribute("FireDirection",(int) mFireDirection)); result.Add(StandardHelper.SaveElement(LaunchPosition,"LaunchPosition",pPersistenceData)); result.Add(new XAttribute("LaunchVelocity",LaunchVelocity)); result.Add(new XAttribute("ShootProjectileType",_ShootProjectileType.Name)); return result; } public RayBlock(RectangleF blockrect) : this(blockrect, new PointF(0, -3), new PointF(blockrect.Width / 2, 0)) { } protected RayBlock(ImageBlock clonethis) : base(clonethis) { } public override string GetToolTipInfo(IEditorClient Client) { return base.GetToolTipInfo(Client) + "\n" + "Shoots:" + _ShootProjectileTypeString; } public override object Clone() { return new RayBlock(this); } public void ShootProjectile(BCBlockGameState parentstate) { ShootProjectile(parentstate, 2); } public void ShootProjectile(BCBlockGameState parentstate,float Speed) { //first, determine appropriate velocity and starting locations. 
float usespeed = Speed; float useradius = 2f; PointF VelocityUse = new PointF(); PointF InitialLocation = new PointF(); float CenterX = BlockRectangle.Left + (BlockRectangle.Width / 2); float CenterY = BlockRectangle.Top + (BlockRectangle.Height / 2); switch (mFireDirection) { case RayFireDirection.Fire_Up: VelocityUse = new PointF(0, -usespeed); InitialLocation = new PointF(CenterX, BlockRectangle.Top - useradius); break; case RayFireDirection.Fire_Down: VelocityUse = new PointF(0, usespeed); InitialLocation = new PointF(CenterX, BlockRectangle.Bottom + useradius); break; case RayFireDirection.Fire_Left: VelocityUse = new PointF(-usespeed, 0); InitialLocation = new PointF(BlockRectangle.Left - useradius, CenterY); break; case RayFireDirection.Fire_Right: VelocityUse = new PointF(usespeed, 0); InitialLocation = new PointF(BlockRectangle.Right + useradius, CenterY); break; } if (parentstate.GameArea.Contains(new Point((int)InitialLocation.X + (int)VelocityUse.X, (int)InitialLocation.Y + (int)VelocityUse.Y))) { //Debug.Print("using velocity:" + VelocityUse.ToString()); iProjectile newprojectile = (iProjectile)Activator.CreateInstance(_ShootProjectileType); newprojectile.Location = InitialLocation; newprojectile.Velocity = VelocityUse; if (newprojectile is cBall) { parentstate.NextFrameCalls.Enqueue(new BCBlockGameState.NextFrameStartup(() => parentstate.Balls.AddLast((cBall)newprojectile))); } else if (newprojectile is GameObject) { parentstate.NextFrameCalls.Enqueue(new BCBlockGameState.NextFrameStartup(() => parentstate.GameObjects.AddLast((GameObject)newprojectile))); } //parentstate.NextFrameCalls.Enqueue(new BCBlockGameState.NextFrameStartup(()=>parentstate.GameObjects.AddLast(newprojectile)); /*cBall newball = new cBall(InitialLocation, VelocityUse); //newball.isTempBall=true; newball.Behaviours.Clear(); newball.Behaviours.Add(new TempBallBehaviour()); newball.Radius = useradius; ballsadded.Add(newball);*/ } } public override bool PerformBlockHit(BCBlockGameState parentstate, cBall ballhit) { BCBlockGameState.Soundman.PlaySound("ray", 0.9f); //parentstate.GameScore += 65; //AddScore(parentstate, 10); ShootProjectile(parentstate,ballhit.getMagnitude()); /*if (BlockRectangle.Top > 10) { cBall newball = new cBall( new PointF(BlockRectangle.Left + (float) (BlockRectangle.Width/2f), BlockRectangle.Top - 5), new PointF(0, -4)); newball.isTempBall = true; ballsadded.Add(newball); } */ //ray blocks cannot be destroyed. return false; } protected override Particle AddStandardSprayParticle(BCBlockGameState parentstate, cBall ballhit) { //return base.AddStandardSprayParticle(parentstate, ballhit); return AddSprayParticle_Default(parentstate, ballhit); } public override bool MustDestroy() { return false; } public override void GetObjectData(SerializationInfo info, StreamingContext context) { base.GetObjectData(info, context); info.AddValue("LaunchVelocity", LaunchVelocity); info.AddValue("LaunchPosition", LaunchPosition); info.AddValue("FireDirection", mFireDirection); info.AddValue("ShootProjectileType", _ShootProjectileTypeString); } } }
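// Illustrative sketch (not part of BASeBlock): the direction-to-spawn mapping used by
// ShootProjectile above, isolated as a pure helper so the geometry is easy to verify.
// The class and method names here are hypothetical.
using System.Drawing;

namespace BASeCamp.BASeBlock.Blocks
{
    internal static class RayBlockMath
    {
        // Computes the initial velocity and spawn location for a projectile fired from a block,
        // placing the projectile just outside the fired edge (offset by the projectile radius).
        public static void GetLaunch(RectangleF block, RayBlock.RayFireDirection direction, float speed, float radius,
            out PointF velocity, out PointF location)
        {
            float centerX = block.Left + (block.Width / 2);
            float centerY = block.Top + (block.Height / 2);
            switch (direction)
            {
                case RayBlock.RayFireDirection.Fire_Up:
                    velocity = new PointF(0, -speed);
                    location = new PointF(centerX, block.Top - radius);
                    break;
                case RayBlock.RayFireDirection.Fire_Down:
                    velocity = new PointF(0, speed);
                    location = new PointF(centerX, block.Bottom + radius);
                    break;
                case RayBlock.RayFireDirection.Fire_Left:
                    velocity = new PointF(-speed, 0);
                    location = new PointF(block.Left - radius, centerY);
                    break;
                default: // Fire_Right
                    velocity = new PointF(speed, 0);
                    location = new PointF(block.Right + radius, centerY);
                    break;
            }
        }
    }
}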
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.ObjectModel; using System.Diagnostics; using System.Text; using Microsoft.CodeAnalysis.Collections; using Type = Microsoft.VisualStudio.Debugger.Metadata.Type; namespace Microsoft.CodeAnalysis.ExpressionEvaluator { // Implementation for "displaying type name as string" aspect of the Formatter component internal abstract partial class Formatter { /// <returns>The qualified name (i.e. including containing types and namespaces) of a named, /// pointer, or array type.</returns> internal string GetTypeName(TypeAndCustomInfo typeAndInfo, bool escapeKeywordIdentifiers, out bool sawInvalidIdentifier) { var type = typeAndInfo.Type; if (type == null) { throw new ArgumentNullException(nameof(type)); } ReadOnlyCollection<byte> dynamicFlags = null; ReadOnlyCollection<string> tupleElementNames = null; var typeInfo = typeAndInfo.Info; if (typeInfo != null) { CustomTypeInfo.Decode(typeInfo.PayloadTypeId, typeInfo.Payload, out dynamicFlags, out tupleElementNames); } var dynamicFlagIndex = 0; var tupleElementIndex = 0; var pooled = PooledStringBuilder.GetInstance(); AppendQualifiedTypeName( pooled.Builder, type, dynamicFlags, ref dynamicFlagIndex, tupleElementNames, ref tupleElementIndex, escapeKeywordIdentifiers, out sawInvalidIdentifier); return pooled.ToStringAndFree(); } /// <summary> /// Append the qualified name (i.e. including containing types and namespaces) of a named, /// pointer, or array type to <paramref name="builder"/>. /// </summary> /// <remarks> /// Keyword strings are appended for primitive types (e.g. "int" for "System.Int32"). /// Question mark syntax is used for <see cref="Nullable{T}"/>. /// No special handling is required for anonymous types - they are expected to be /// emitted with <see cref="DebuggerDisplayAttribute.Type"/> set to "&lt;Anonymous Type&gt;. /// This is fortunate, since we don't have a good way to recognize them in metadata. /// </remarks> protected void AppendQualifiedTypeName( StringBuilder builder, Type type, ReadOnlyCollection<byte> dynamicFlags, ref int dynamicFlagIndex, ReadOnlyCollection<string> tupleElementNames, ref int tupleElementIndex, bool escapeKeywordIdentifiers, out bool sawInvalidIdentifier) { Type originalType = type; // Can have an array of pointers, but not a pointer to an array, so consume these first. // We'll reconstruct this information later from originalType. while (type.IsArray) { dynamicFlagIndex++; type = type.GetElementType(); } int pointerCount = 0; while (type.IsPointer) { var elementType = type.GetElementType(); if (elementType == null) { // Null for function pointers. 
break; } dynamicFlagIndex++; pointerCount++; type = elementType; } int nullableCount = 0; Type typeArg; while ((typeArg = type.GetNullableTypeArgument()) != null) { dynamicFlagIndex++; nullableCount++; type = typeArg; } Debug.Assert(nullableCount < 2, "Benign: someone is nesting nullables."); Debug.Assert(pointerCount == 0 || nullableCount == 0, "Benign: pointer to nullable?"); AppendQualifiedTypeNameInternal( builder, type, dynamicFlags, ref dynamicFlagIndex, tupleElementNames, ref tupleElementIndex, escapeKeywordIdentifiers, out sawInvalidIdentifier); builder.Append('?', nullableCount); builder.Append('*', pointerCount); type = originalType; while (type.IsArray) { AppendRankSpecifier(builder, type.GetArrayRank()); type = type.GetElementType(); } } /// <summary> /// Append the qualified name (i.e. including containing types and namespaces) of a named type /// (i.e. not a pointer or array type) to <paramref name="builder"/>. /// </summary> /// <remarks> /// Keyword strings are appended for primitive types (e.g. "int" for "System.Int32"). /// </remarks> /// <remarks> /// Does not call itself or <see cref="AppendQualifiedTypeName"/> (directly). /// </remarks> private void AppendQualifiedTypeNameInternal( StringBuilder builder, Type type, ReadOnlyCollection<byte> dynamicFlags, ref int dynamicFlagIndex, ReadOnlyCollection<string> tupleElementNames, ref int tupleElementIndex, bool escapeKeywordIdentifiers, out bool sawInvalidIdentifier) { var isDynamic = DynamicFlagsCustomTypeInfo.GetFlag(dynamicFlags, dynamicFlagIndex++) && type.IsObject(); if (AppendSpecialTypeName(builder, type, isDynamic)) { sawInvalidIdentifier = false; return; } Debug.Assert(!isDynamic, $"Dynamic should have been handled by {nameof(AppendSpecialTypeName)}"); Debug.Assert(!IsPredefinedType(type)); if (type.IsGenericParameter) { AppendIdentifier(builder, escapeKeywordIdentifiers, type.Name, out sawInvalidIdentifier); return; } int cardinality; if (type.IsTupleCompatible(out cardinality)) { if (cardinality == 1) { // Not displayed as a tuple but is included in tuple element names. tupleElementIndex++; } else { AppendTupleElements( builder, type, cardinality, dynamicFlags, ref dynamicFlagIndex, tupleElementNames, ref tupleElementIndex, escapeKeywordIdentifiers, out sawInvalidIdentifier); return; } } // Note: in the Reflection/LMR object model, all type arguments are on the most nested type. var hasTypeArguments = type.IsGenericType; var typeArguments = hasTypeArguments ? type.GetGenericArguments() : null; Debug.Assert(hasTypeArguments == (typeArguments != null)); var numTypeArguments = hasTypeArguments ? typeArguments.Length : 0; sawInvalidIdentifier = false; bool sawSingleInvalidIdentifier; var typeArgumentOffset = 0; if (type.IsNested) { // Push from inside, out. var stack = ArrayBuilder<Type>.GetInstance(); { var containingType = type.DeclaringType; while (containingType != null) { stack.Add(containingType); containingType = containingType.DeclaringType; } } var lastContainingTypeIndex = stack.Count - 1; AppendNamespacePrefix(builder, stack[lastContainingTypeIndex], escapeKeywordIdentifiers, out sawSingleInvalidIdentifier); sawInvalidIdentifier |= sawSingleInvalidIdentifier; // Pop from outside, in. for (int i = lastContainingTypeIndex; i >= 0; i--) { var containingType = stack[i]; // ACASEY: I explored the type in the debugger and couldn't find the arity stored/exposed separately. int arity = hasTypeArguments ? 
containingType.GetGenericArguments().Length - typeArgumentOffset : 0; AppendUnqualifiedTypeName( builder, containingType, dynamicFlags, ref dynamicFlagIndex, tupleElementNames, ref tupleElementIndex, escapeKeywordIdentifiers, typeArguments, typeArgumentOffset, arity, out sawSingleInvalidIdentifier); sawInvalidIdentifier |= sawSingleInvalidIdentifier; builder.Append('.'); typeArgumentOffset += arity; } stack.Free(); } else { AppendNamespacePrefix(builder, type, escapeKeywordIdentifiers, out sawSingleInvalidIdentifier); sawInvalidIdentifier |= sawSingleInvalidIdentifier; } AppendUnqualifiedTypeName( builder, type, dynamicFlags, ref dynamicFlagIndex, tupleElementNames, ref tupleElementIndex, escapeKeywordIdentifiers, typeArguments, typeArgumentOffset, numTypeArguments - typeArgumentOffset, out sawSingleInvalidIdentifier); sawInvalidIdentifier |= sawSingleInvalidIdentifier; } /// <summary> /// Helper for appending the qualified name of the containing namespace of a type. /// NOTE: Unless the qualified name is empty, there will always be a trailing dot. /// </summary> private void AppendNamespacePrefix(StringBuilder builder, Type type, bool escapeKeywordIdentifiers, out bool sawInvalidIdentifier) { sawInvalidIdentifier = false; var @namespace = type.Namespace; if (!string.IsNullOrEmpty(@namespace)) { if (@namespace.Contains(".")) { bool sawSingleInvalidIdentifier; var pooled = PooledStringBuilder.GetInstance(); var identifierBuilder = pooled.Builder; foreach (var ch in @namespace) { if (ch == '.') { AppendIdentifier(builder, escapeKeywordIdentifiers, identifierBuilder.ToString(), out sawSingleInvalidIdentifier); sawInvalidIdentifier |= sawSingleInvalidIdentifier; builder.Append(ch); identifierBuilder.Clear(); } else { identifierBuilder.Append(ch); } } AppendIdentifier(builder, escapeKeywordIdentifiers, identifierBuilder.ToString(), out sawSingleInvalidIdentifier); sawInvalidIdentifier |= sawSingleInvalidIdentifier; pooled.Free(); } else { AppendIdentifier(builder, escapeKeywordIdentifiers, @namespace, out sawInvalidIdentifier); } builder.Append('.'); } } /// <summary> /// Append the name of the type and its type arguments. Do not append the type's containing type or namespace. /// </summary> /// <param name="builder">Builder to which the name will be appended.</param> /// <param name="type">Type, the name of which will be appended.</param> /// <param name="dynamicFlags">Flags indicating which occurrences of &quot;object&quot; need to be replaced by &quot;dynamic&quot;.</param> /// <param name="dynamicFlagIndex">Current index into <paramref name="dynamicFlags"/>.</param> /// <param name="tupleElementNames">Non-default tuple names.</param> /// <param name="tupleElementIndex">Current index into <paramref name="tupleElementNames"/>.</param> /// <param name="escapeKeywordIdentifiers">True if identifiers that are also keywords should be prefixed with '@'.</param> /// <param name="typeArguments"> /// The type arguments of the type passed to <see cref="AppendQualifiedTypeNameInternal"/>, which might be nested /// within <paramref name="type"/>. In the Reflection/LMR object model, all type arguments are passed to the /// most nested type. To get back to the C# model, we have to propagate them out to containing types. /// </param> /// <param name="typeArgumentOffset"> /// The first position in <paramref name="typeArguments"/> that is a type argument to <paramref name="type"/>, /// from a C# perspective. 
/// </param> /// <param name="arity"> /// The number of type parameters of <paramref name="type"/>, from a C# perspective. /// </param> /// <param name="sawInvalidIdentifier">True if the name includes an invalid identifier (see <see cref="IsValidIdentifier"/>); false otherwise.</param> /// <remarks> /// We're passing the full array plus bounds, rather than a tailored array, to avoid creating a lot of short-lived /// temporary arrays. /// </remarks> private void AppendUnqualifiedTypeName( StringBuilder builder, Type type, ReadOnlyCollection<byte> dynamicFlags, ref int dynamicFlagIndex, ReadOnlyCollection<string> tupleElementNames, ref int tupleElementIndex, bool escapeKeywordIdentifiers, Type[] typeArguments, int typeArgumentOffset, int arity, out bool sawInvalidIdentifier) { if (typeArguments == null || arity == 0) { AppendIdentifier(builder, escapeKeywordIdentifiers, type.Name, out sawInvalidIdentifier); return; } var mangledName = type.Name; var separatorIndex = mangledName.IndexOf('`'); var unmangledName = separatorIndex < 0 ? mangledName : mangledName.Substring(0, separatorIndex); AppendIdentifier(builder, escapeKeywordIdentifiers, unmangledName, out sawInvalidIdentifier); bool argumentsSawInvalidIdentifier; AppendGenericTypeArguments( builder, typeArguments, typeArgumentOffset, dynamicFlags, ref dynamicFlagIndex, tupleElementNames, ref tupleElementIndex, arity, escapeKeywordIdentifiers, out argumentsSawInvalidIdentifier); sawInvalidIdentifier |= argumentsSawInvalidIdentifier; } private void AppendTupleElements( StringBuilder builder, Type type, int cardinality, ReadOnlyCollection<byte> dynamicFlags, ref int dynamicFlagIndex, ReadOnlyCollection<string> tupleElementNames, ref int tupleElementIndex, bool escapeKeywordIdentifiers, out bool sawInvalidIdentifier) { sawInvalidIdentifier = false; #if DEBUG int lastNameIndex = tupleElementIndex + cardinality; #endif int nameIndex = tupleElementIndex; builder.Append('('); bool any = false; while (true) { tupleElementIndex += cardinality; var typeArguments = type.GetGenericArguments(); int nTypeArgs = typeArguments.Length; Debug.Assert(nTypeArgs > 0); Debug.Assert(nTypeArgs <= TypeHelpers.TupleFieldRestPosition); int nFields = Math.Min(nTypeArgs, TypeHelpers.TupleFieldRestPosition - 1); for (int i = 0; i < nFields; i++) { if (any) { builder.Append(", "); } bool sawSingleInvalidIdentifier; var name = CustomTypeInfo.GetTupleElementNameIfAny(tupleElementNames, nameIndex); nameIndex++; AppendTupleElement( builder, typeArguments[i], name, dynamicFlags, ref dynamicFlagIndex, tupleElementNames, ref tupleElementIndex, escapeKeywordIdentifiers, sawInvalidIdentifier: out sawSingleInvalidIdentifier); sawInvalidIdentifier |= sawSingleInvalidIdentifier; any = true; } if (nTypeArgs < TypeHelpers.TupleFieldRestPosition) { break; } Debug.Assert(!DynamicFlagsCustomTypeInfo.GetFlag(dynamicFlags, dynamicFlagIndex)); dynamicFlagIndex++; type = typeArguments[nTypeArgs - 1]; cardinality = type.GetTupleCardinalityIfAny(); } #if DEBUG Debug.Assert(nameIndex == lastNameIndex); #endif builder.Append(')'); } protected void AppendIdentifier(StringBuilder builder, bool escapeKeywordIdentifiers, string identifier, out bool sawInvalidIdentifier) { if (escapeKeywordIdentifiers) { AppendIdentifierEscapingPotentialKeywords(builder, identifier, out sawInvalidIdentifier); } else { sawInvalidIdentifier = !IsValidIdentifier(identifier); builder.Append(identifier); } } #region Language-specific type name formatting behavior protected abstract void 
AppendIdentifierEscapingPotentialKeywords(StringBuilder builder, string identifier, out bool sawInvalidIdentifier); protected abstract void AppendGenericTypeArguments( StringBuilder builder, Type[] typeArguments, int typeArgumentOffset, ReadOnlyCollection<byte> dynamicFlags, ref int dynamicFlagIndex, ReadOnlyCollection<string> tupleElementNames, ref int tupleElementIndex, int arity, bool escapeKeywordIdentifiers, out bool sawInvalidIdentifier); protected abstract void AppendTupleElement( StringBuilder builder, Type type, string nameOpt, ReadOnlyCollection<byte> dynamicFlags, ref int dynamicFlagIndex, ReadOnlyCollection<string> tupleElementNames, ref int tupleElementIndex, bool escapeKeywordIdentifiers, out bool sawInvalidIdentifier); protected abstract void AppendRankSpecifier(StringBuilder builder, int rank); protected abstract bool AppendSpecialTypeName(StringBuilder builder, Type type, bool isDynamic); #endregion } }
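// Illustrative sketch (not the actual Roslyn C# formatter): how a C#-oriented subclass might
// implement two of the language-specific hooks declared above. The class name and bodies are
// hypothetical simplifications of the real behavior.
using System.Text;

internal static class CSharpFormatterSketch
{
    // "int[,]" style rank specifiers: '[' followed by (rank - 1) commas and ']'.
    public static void AppendRankSpecifier(StringBuilder builder, int rank)
    {
        builder.Append('[');
        builder.Append(',', rank - 1);
        builder.Append(']');
    }

    // The arity suffix ("`2") is stripped before type arguments are appended, so
    // "Dictionary`2" plus "<string, int>" becomes "Dictionary<string, int>",
    // matching the unmangling done in AppendUnqualifiedTypeName above.
    public static string Unmangle(string metadataName)
    {
        var separatorIndex = metadataName.IndexOf('`');
        return separatorIndex < 0 ? metadataName : metadataName.Substring(0, separatorIndex);
    }
}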
/* *************************************************************************** * This file is part of SharpNEAT - Evolution of Neural Networks. * * Copyright 2004-2006, 2009-2010 Colin Green ([email protected]) * * SharpNEAT is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * SharpNEAT is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with SharpNEAT. If not, see <http://www.gnu.org/licenses/>. */ using System.Collections.Generic; // Disables missing comment warnings for non-private variables. #pragma warning disable 1591 namespace SharpNeat.Phenomes.NeuralNets { /// <summary> /// This class is provided for debugging and educational purposes. FastCyclicNetwork is functionally /// equivalent and is much faster and therefore should be used instead of CyclicNetwork in most /// circumstances. /// /// A neural network class that represents a network with recurrent (cyclic) connections. Recurrent /// connections are handled by each neuron storing two values, a pre- and post-activation value /// (InputValue and OutputValue). This allows us to calculate the output value for the current /// iteration/timestep without modifying the output values from the previous iteration. That is, we /// calculate all of this timestep's state based on state from the previous timestep. /// /// When activating networks of this class the network's state is updated for a fixed number of /// timesteps, the number of which is specified by the maxIterations parameter on the constructor. /// See RelaxingCyclicNetwork for an alternative activation scheme. /// </summary> public class CyclicNetwork : IBlackBox { protected readonly List<Neuron> _neuronList; protected readonly List<Connection> _connectionList; // For efficiency we store the number of input and output neurons. protected readonly int _inputNeuronCount; protected readonly int _outputNeuronCount; protected readonly int _inputAndBiasNeuronCount; protected readonly int _timestepsPerActivation; // The input and output arrays that the black box uses for IO with the outside world. protected readonly double[] _inputSignalArray; protected readonly double[] _outputSignalArray; readonly SignalArray _inputSignalArrayWrapper; readonly SignalArray _outputSignalArrayWrapper; #region Constructor /// <summary> /// Constructs a CyclicNetwork with the provided pre-built neurons and connections. /// </summary> public CyclicNetwork(List<Neuron> neuronList, List<Connection> connectionList, int inputNeuronCount, int outputNeuronCount, int timestepsPerActivation) { _neuronList = neuronList; _connectionList = connectionList; _inputNeuronCount = inputNeuronCount; _outputNeuronCount = outputNeuronCount; _inputAndBiasNeuronCount = inputNeuronCount + 1; _timestepsPerActivation = timestepsPerActivation; _inputSignalArray = new double[_inputNeuronCount]; _outputSignalArray = new double[_outputNeuronCount]; _inputSignalArrayWrapper = new SignalArray(_inputSignalArray, 0, _inputNeuronCount); _outputSignalArrayWrapper = new SignalArray(_outputSignalArray, 0, outputNeuronCount); } #endregion #region IBlackBox Members /// <summary> /// Gets the number of inputs. 
/// </summary> public int InputCount { get { return _inputNeuronCount; } } /// <summary> /// Gets the number of outputs. /// </summary> public int OutputCount { get { return _outputNeuronCount; } } /// <summary> /// Gets an array for feeding input signals to the network. /// </summary> public ISignalArray InputSignalArray { get { return _inputSignalArrayWrapper; } } /// <summary> /// Gets an array of output signals from the network. /// </summary> public ISignalArray OutputSignalArray { get { return _outputSignalArrayWrapper; } } /// <summary> /// Gets a value indicating whether the internal state is valid. Always returns true for this class. /// </summary> public virtual bool IsStateValid { get { return true; } } /// <summary> /// Activate the network for a fixed number of timesteps defined by maxTimesteps is reached. /// </summary> public virtual void Activate() { // Copy input signals into input neurons. // Note. In fast implementations we can skip this step because the array is // part of the working data of the network. for(int i=0; i<_inputNeuronCount; i++) { // The +1 takes into account the bias neuron at index 0. // Note. we set the outputvalue of the input neurons, not the input value. This is because we // don't want the signal to pass through the neuron's activation function. _neuronList[i+1].OutputValue = _inputSignalArray[i]; } // Activate the network for a fixed number of timesteps. int connectionCount = _connectionList.Count; int neuronCount = _neuronList.Count; for(int i=0; i<_timestepsPerActivation; i++) { // Loop over all connections. // Calculate each connection's output signal by multiplying its weight by the output value // of its source neuron. // Add the connection's output value to the target neuron's input value. Neurons therefore // accumulate all input value from connections targeting them. for(int j=0; j<connectionCount; j++) { Connection connection = _connectionList[j]; connection.OutputValue = connection.SourceNeuron.OutputValue * connection.Weight; connection.TargetNeuron.InputValue += connection.OutputValue; } // Loop over all output and hidden neurons, passing their input signal through their activation // function to produce an output value. Note we skip bias and input neurons because they have a // fixed output value. for(int j=_inputAndBiasNeuronCount; j<neuronCount; j++) { Neuron neuron = _neuronList[j]; neuron.OutputValue = neuron.ActivationFunction.Calculate(neuron.InputValue, neuron.AuxiliaryArguments); // Reset input value, in preparation for the next timestep/iteration. neuron.InputValue = 0.0; } } // Copy the output neuron output values into the output signal array. for(int i=_inputAndBiasNeuronCount, outputIdx=0; outputIdx<_outputNeuronCount; i++, outputIdx++) { _outputSignalArray[outputIdx] = _neuronList[i].OutputValue; } } /// <summary> /// Reset the network's internal state. /// </summary> public virtual void ResetState() { // Reset neuron state for all but the bias neuron. // Input neurons - avoid setting InputValue. We only use the OutputValue of input neurons. // TODO: Not strictly necessary; input node state is always overwritten at the initial stages of network activation. for(int i=1; i<_inputAndBiasNeuronCount; i++) { _neuronList[i].OutputValue = 0.0; } // Reset input and output value of all remaining neurons (output and hidden neurons). int count = _neuronList.Count; for(int i=_inputAndBiasNeuronCount; i<count; i++) { _neuronList[i].InputValue = 0.0; _neuronList[i].OutputValue = 0.0; } // Reset connection states. 
count = _connectionList.Count; for(int i=0; i<count; i++) { _connectionList[i].OutputValue = 0.0; } } #endregion } }
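// Illustrative sketch (not part of SharpNEAT): the propagation rule used by CyclicNetwork.Activate,
// reduced to plain arrays for a single timestep so the two-phase (accumulate, then
// activate-and-reset) update is easy to follow. All names and the logistic activation function
// here are hypothetical stand-ins for the Neuron/Connection/IActivationFunction types.
using System;

internal static class CyclicStepSketch
{
    // connections[k] = (source, target, weight); inputValues/outputValues are indexed by neuron.
    public static void Step(
        (int Source, int Target, double Weight)[] connections,
        double[] inputValues,
        double[] outputValues,
        int firstNonInputNeuron)
    {
        // Phase 1: every connection adds weight * source output to its target's input, so each
        // neuron accumulates the signals from all connections targeting it.
        foreach (var (source, target, weight) in connections)
        {
            inputValues[target] += outputValues[source] * weight;
        }

        // Phase 2: hidden/output neurons pass the accumulated input through the activation
        // function, then reset their input accumulator for the next timestep. Bias/input neurons
        // are skipped because their output value is fixed for the activation.
        for (int i = firstNonInputNeuron; i < outputValues.Length; i++)
        {
            outputValues[i] = 1.0 / (1.0 + Math.Exp(-inputValues[i])); // logistic activation
            inputValues[i] = 0.0;
        }
    }
}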
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * * Contains some contributions under the Thrift Software License. * Please see doc/old-thrift-license.txt in the Thrift distribution for * details. */ using System; namespace FluentCassandra.Thrift.Protocol { /** * TProtocolDecorator forwards all requests to an enclosed TProtocol instance, * providing a way to author concise concrete decorator subclasses. While it has * no abstract methods, it is marked abstract as a reminder that by itself, * it does not modify the behaviour of the enclosed TProtocol. * * See p.175 of Design Patterns (by Gamma et al.) * See TMultiplexedProtocol */ public abstract class TProtocolDecorator : TProtocol { private TProtocol WrappedProtocol; /** * Encloses the specified protocol. * @param protocol All operations will be forward to this protocol. Must be non-null. */ public TProtocolDecorator(TProtocol protocol) : base( protocol.Transport) { WrappedProtocol = protocol; } public override void WriteMessageBegin(TMessage tMessage) { WrappedProtocol.WriteMessageBegin(tMessage); } public override void WriteMessageEnd() { WrappedProtocol.WriteMessageEnd(); } public override void WriteStructBegin(TStruct tStruct) { WrappedProtocol.WriteStructBegin(tStruct); } public override void WriteStructEnd() { WrappedProtocol.WriteStructEnd(); } public override void WriteFieldBegin(TField tField) { WrappedProtocol.WriteFieldBegin(tField); } public override void WriteFieldEnd() { WrappedProtocol.WriteFieldEnd(); } public override void WriteFieldStop() { WrappedProtocol.WriteFieldStop(); } public override void WriteMapBegin(TMap tMap) { WrappedProtocol.WriteMapBegin(tMap); } public override void WriteMapEnd() { WrappedProtocol.WriteMapEnd(); } public override void WriteListBegin(TList tList) { WrappedProtocol.WriteListBegin(tList); } public override void WriteListEnd() { WrappedProtocol.WriteListEnd(); } public override void WriteSetBegin(TSet tSet) { WrappedProtocol.WriteSetBegin(tSet); } public override void WriteSetEnd() { WrappedProtocol.WriteSetEnd(); } public override void WriteBool(bool b) { WrappedProtocol.WriteBool(b); } public override void WriteByte(sbyte b) { WrappedProtocol.WriteByte(b); } public override void WriteI16(short i) { WrappedProtocol.WriteI16(i); } public override void WriteI32(int i) { WrappedProtocol.WriteI32(i); } public override void WriteI64(long l) { WrappedProtocol.WriteI64(l); } public override void WriteDouble(double v) { WrappedProtocol.WriteDouble(v); } public override void WriteString(String s) { WrappedProtocol.WriteString(s); } public override void WriteBinary(byte[] bytes) { WrappedProtocol.WriteBinary(bytes); } public override TMessage ReadMessageBegin() { return WrappedProtocol.ReadMessageBegin(); } public override void ReadMessageEnd() { WrappedProtocol.ReadMessageEnd(); 
} public override TStruct ReadStructBegin() { return WrappedProtocol.ReadStructBegin(); } public override void ReadStructEnd() { WrappedProtocol.ReadStructEnd(); } public override TField ReadFieldBegin() { return WrappedProtocol.ReadFieldBegin(); } public override void ReadFieldEnd() { WrappedProtocol.ReadFieldEnd(); } public override TMap ReadMapBegin() { return WrappedProtocol.ReadMapBegin(); } public override void ReadMapEnd() { WrappedProtocol.ReadMapEnd(); } public override TList ReadListBegin() { return WrappedProtocol.ReadListBegin(); } public override void ReadListEnd() { WrappedProtocol.ReadListEnd(); } public override TSet ReadSetBegin() { return WrappedProtocol.ReadSetBegin(); } public override void ReadSetEnd() { WrappedProtocol.ReadSetEnd(); } public override bool ReadBool() { return WrappedProtocol.ReadBool(); } public override sbyte ReadByte() { return WrappedProtocol.ReadByte(); } public override short ReadI16() { return WrappedProtocol.ReadI16(); } public override int ReadI32() { return WrappedProtocol.ReadI32(); } public override long ReadI64() { return WrappedProtocol.ReadI64(); } public override double ReadDouble() { return WrappedProtocol.ReadDouble(); } public override String ReadString() { return WrappedProtocol.ReadString(); } public override byte[] ReadBinary() { return WrappedProtocol.ReadBinary(); } } }
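// Illustrative sketch (not part of the Thrift distribution): a minimal concrete decorator built
// on TProtocolDecorator. It overrides only the calls it cares about and inherits the forwarding
// behaviour for everything else. The class name, the use of Console, and the TMessage.Name
// property access are assumptions made for illustration.
using System;

namespace FluentCassandra.Thrift.Protocol
{
    public class TLoggingProtocol : TProtocolDecorator
    {
        public TLoggingProtocol(TProtocol protocol)
            : base(protocol)
        {
        }

        public override void WriteMessageBegin(TMessage tMessage)
        {
            Console.WriteLine("-> {0}", tMessage.Name);
            base.WriteMessageBegin(tMessage);
        }

        public override TMessage ReadMessageBegin()
        {
            TMessage message = base.ReadMessageBegin();
            Console.WriteLine("<- {0}", message.Name);
            return message;
        }
    }
}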
using Core.Common.Reflect; using System; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Collections.Specialized; using System.ComponentModel.Composition; using System.Diagnostics; using System.Linq; using System.Reflection; using System.Text; using System.Threading.Tasks; using Core.Common.Crypto; namespace Core.Common.Data { public abstract class MultiSet { private IDictionary<Type, ICollection> sets = new Dictionary<Type, ICollection>(); protected abstract ICollection<T> MakeSet<T>() where T : class; private static MethodInfo createCollectionMethod; protected virtual void OnCollectionCreated(Type type, ICollection collection) { } protected virtual void OnCollectionRemoved(Type type, ICollection collection) { } protected virtual void OnItemAdded(Type type, ICollection collection, object item) { } protected virtual void OnItemRemoved(Type type, ICollection collection, object item) { } protected ICollection CreateSet(Type itemType) { if (createCollectionMethod == null) createCollectionMethod = typeof(AbstractDataContext).GetMethod("MakeSet", BindingFlags.NonPublic | BindingFlags.Instance); Trace.WriteLine("Creating Collection for " + itemType.Name); var method = createCollectionMethod.MakeGenericMethod(itemType); var collection = method.Invoke(this, new object[0]) as ICollection; return collection; } protected ICollection Set(Type type) { ICollection set; if (sets.TryGetValue(type, out set)) return set; set = CreateSet(type); sets[type] = set; return set; } public ICollection<T> Set<T>() where T : class { var collection = Set(typeof(T)); return collection as ICollection<T>; } protected IEnumerable<Type> Types { get { return sets.Keys; } } protected IEnumerable<KeyValuePair<Type, ICollection>> GetSets() { return sets; } protected void AddToSet(object item) { var type = item.GetType(); var collection = Set(type) as IContainer; collection.Add(item); } protected void RemoveFromSet(object item) { var type = item.GetType(); var collection = Set(type); var removeMethod = collection.GetType().GetMethod("Remove"); removeMethod.Invoke(collection, new[] { item }); } } public abstract class AbstractDataContext : MultiSet, IDataContext { private IDictionary<object, IEntry> entities = new Dictionary<object, IEntry>(); /// <summary> /// needs to return the hash value for the specified entity /// the hash needs to change only if entity's persistent values change /// </summary> /// <param name="entity"></param> /// <returns></returns> protected abstract object GetHash(object entity); protected abstract object GenerateId(object item); protected abstract IEntry CreateEntry(object item); /// <summary> /// needs to return the set of referenced entities including entity itself /// </summary> /// <param name="entity"></param> /// <returns></returns> protected abstract IEnumerable<object> GetReferencedEntities(object entity); public bool ChangeState(IEntry entry, EntityState oldState, EntityState newState, Action<EntityState> setState) { if (oldState == newState) return false; var oldHash = entry.Hash; var newHash = GetHash(entry.Value); setState(newState); if (newState == EntityState.Deleted || newState == EntityState.Detached) { RemoveFromSet(entry.Value); } else { AddToSet(entry.Value); } var references = GetReferencedEntities(entry.Value); foreach (var reference in references) { var e = Entry(reference); if (e.State == EntityState.Detached) { e.State = EntityState.Attached; } } if (DataChanged != null) DataChanged(this, new 
DataContextChangedEventArgs(entry)); return true; } public event DataChangedEventHandler DataChanged; public IEnumerable<IEntry> Entries { get { return entities.Values; } } public IEntry GetEntry(object entity) { if (entity == null) return null; lock (entities) { IEntry entry; if (!entities.TryGetValue(entity, out entry)) return null; return entry; } } public IEntry Entry(object entity) { if (entity == null) return null; var graph = GetReferencedEntities(entity); lock (entities) { foreach (var node in graph) { if (node == null) continue; if (entities.ContainsKey(node)) continue; var entry = CreateEntry(node) as SimpleEntry; entities[node] = entry; } return entities[entity]; } } public Task SaveAsync() { return DoSaveAsync(); } protected abstract Task DoSaveAsync(); protected abstract Task DoRefreshAsync(); public Task RefreshAsync() { return DoRefreshAsync(); } protected IEntry GetEntryById(object id) { return Entries.SingleOrDefault(e => e.Id.Equals(id)); } protected object GetById(object id) { var entry = GetEntryById(id); if (entry == null) return null; return entry.Value; } } }
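// Illustrative sketch (not part of Core.Common): a minimal MultiSet subclass showing how the
// abstract MakeSet<T> factory feeds the per-type Set(...) cache. Backing each set with List<T>
// is an assumption; real implementations may return observable or change-tracked collections.
using System.Collections.Generic;

namespace Core.Common.Data
{
    public class ListMultiSet : MultiSet
    {
        protected override ICollection<T> MakeSet<T>()
        {
            return new List<T>();
        }
    }

    // Usage: the first Set<T>() call for a type creates its collection via reflection over
    // MakeSet<T>; later calls return the cached instance.
    //   var sets = new ListMultiSet();
    //   sets.Set<string>().Add("a");
    //   sets.Set<string>().Add("b");          // same underlying List<string>
    //   var count = sets.Set<string>().Count; // 2
}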
using System; using System.Collections; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using System.Web; using System.Web.Caching; using System.Web.UI; using System.Xml; using StackExchange.Profiling; using Umbraco.Core; using Umbraco.Core.Cache; using Umbraco.Core.Macros; using Umbraco.Core.Profiling; using Umbraco.Web; using Umbraco.Web.PublishedCache; using Umbraco.Web.PublishedCache.XmlPublishedCache; using Umbraco.Web.Routing; using Umbraco.Web.Templates; using umbraco.cms.businesslogic; using umbraco.cms.businesslogic.property; using umbraco.cms.businesslogic.web; using Umbraco.Core.IO; namespace umbraco.presentation.templateControls { public class ItemRenderer { public readonly static ItemRenderer Instance = new ItemRenderer(); /// <summary> /// Initializes a new instance of the <see cref="ItemRenderer"/> class. /// </summary> protected ItemRenderer() { } /// <summary> /// Renders the specified item. /// </summary> /// <param name="item">The item.</param> /// <param name="writer">The writer.</param> public virtual void Render(Item item, HtmlTextWriter writer) { if (item.DebugMode) { writer.AddAttribute(HtmlTextWriterAttribute.Title, string.Format("Field Tag: '{0}'", item.Field)); writer.AddAttribute("style", "border: 1px solid #fc6;"); writer.RenderBeginTag(HtmlTextWriterTag.Div); } try { StringWriter renderOutputWriter = new StringWriter(); HtmlTextWriter htmlWriter = new HtmlTextWriter(renderOutputWriter); foreach (Control control in item.Controls) { try { control.RenderControl(htmlWriter); } catch (Exception renderException) { // TODO: Validate that the current control is within the scope of a form control // Even controls that are inside this scope, can produce this error in async postback. HttpContext.Current.Trace.Warn("ItemRenderer", String.Format("Error rendering control {0} of {1}.", control.ClientID, item), renderException); } } // parse macros and execute the XSLT transformation on the result if not empty string renderOutput = renderOutputWriter.ToString(); string xsltTransformedOutput = renderOutput.Trim().Length == 0 ? String.Empty : XsltTransform(item.Xslt, renderOutput, item.XsltDisableEscaping); // handle text before/after xsltTransformedOutput = AddBeforeAfterText(xsltTransformedOutput, helper.FindAttribute(item.LegacyAttributes, "insertTextBefore"), helper.FindAttribute(item.LegacyAttributes, "insertTextAfter")); string finalResult = xsltTransformedOutput.Trim().Length > 0 ? xsltTransformedOutput : GetEmptyText(item); //Don't parse urls if a content item is assigned since that is taken care // of with the value converters if (item.ContentItem == null) { writer.Write(TemplateUtilities.ResolveUrlsFromTextString(finalResult)); } else { writer.Write(finalResult); } } catch (Exception renderException) { HttpContext.Current.Trace.Warn("ItemRenderer", String.Format("Error rendering {0}.", item), renderException); } finally { if (item.DebugMode) { writer.RenderEndTag(); } } } /// <summary> /// Renders the field contents. /// Checks via the NodeId attribute whether to fetch data from another page than the current one. 
/// </summary> /// <returns>A string of field contents (macros not parsed)</returns> protected virtual string GetFieldContents(Item item) { var tempElementContent = string.Empty; // if a nodeId is specified we should get the data from another page than the current one if (string.IsNullOrEmpty(item.NodeId) == false) { var tempNodeId = item.GetParsedNodeId(); if (tempNodeId != null && tempNodeId.Value != 0) { //moved the following from the catch block up as this will allow fallback options alt text etc to work var cache = Umbraco.Web.UmbracoContext.Current.ContentCache.InnerCache as PublishedContentCache; if (cache == null) throw new InvalidOperationException("Unsupported IPublishedContentCache, only the Xml one is supported."); var xml = cache.GetXml(Umbraco.Web.UmbracoContext.Current, Umbraco.Web.UmbracoContext.Current.InPreviewMode); var itemPage = new page(xml.GetElementById(tempNodeId.ToString())); tempElementContent = new item(item.ContentItem, itemPage.Elements, item.LegacyAttributes).FieldContent; } } else { // gets the field content from the current page (via the PageElements collection) tempElementContent = new item(item.ContentItem, item.PageElements, item.LegacyAttributes).FieldContent; } return tempElementContent; } /// <summary> /// Inits the specified item. To be called from the OnInit method of Item. /// </summary> /// <param name="item">The item.</param> public virtual void Init(Item item) { } /// <summary> /// Loads the specified item. To be called from the OnLoad method of Item. /// </summary> /// <param name="item">The item.</param> public virtual void Load(Item item) { using (DisposableTimer.DebugDuration<ItemRenderer>(string.Format("Item: {0}", item.Field))) { ParseMacros(item); } } /// <summary> /// Parses the macros inside the text, by creating child elements for each item. /// </summary> /// <param name="item">The item.</param> protected virtual void ParseMacros(Item item) { // do nothing if the macros have already been rendered if (item.Controls.Count > 0) return; var elementText = GetFieldContents(item); //Don't parse macros if there's a content item assigned since the content value // converters take care of that, just add the already parsed text if (item.ContentItem != null) { item.Controls.Add(new LiteralControl(elementText)); } else { using (DisposableTimer.DebugDuration<ItemRenderer>("Parsing Macros")) { MacroTagParser.ParseMacros( elementText, //callback for when a text block is parsed textBlock => item.Controls.Add(new LiteralControl(textBlock)), //callback for when a macro is parsed: (macroAlias, attributes) => { var macroControl = new Macro { Alias = macroAlias }; foreach (var i in attributes.Where(i => macroControl.Attributes[i.Key] == null)) { macroControl.Attributes.Add(i.Key, i.Value); } item.Controls.Add(macroControl); }); } } } /// <summary> /// Transforms the content using the XSLT attribute, if provided. 
/// </summary> /// <param name="xpath">The xpath expression.</param> /// <param name="itemData">The item's rendered content.</param> /// <param name="disableEscaping">if set to <c>true</c>, escaping is disabled.</param> /// <returns>The transformed content if the XSLT attribute is present, otherwise the original content.</returns> protected virtual string XsltTransform(string xpath, string itemData, bool disableEscaping) { if (!String.IsNullOrEmpty(xpath)) { // XML-encode the expression and add the itemData parameter to it string xpathEscaped = xpath.Replace("<", "&lt;").Replace(">", "&gt;").Replace("\"", "&quot;"); string xpathExpression = string.Format(xpathEscaped, "$itemData"); // prepare support for XSLT extensions StringBuilder namespaceList = new StringBuilder(); StringBuilder namespaceDeclaractions = new StringBuilder(); foreach (KeyValuePair<string, object> extension in macro.GetXsltExtensions()) { namespaceList.Append(extension.Key).Append(' '); namespaceDeclaractions.AppendFormat("xmlns:{0}=\"urn:{0}\" ", extension.Key); } // add the XSLT expression into the full XSLT document, together with the needed parameters string xslt = string.Format(Umbraco.Web.umbraco.presentation.umbraco.templateControls.Resources.InlineXslt, xpathExpression, disableEscaping ? "yes" : "no", namespaceList, namespaceDeclaractions); // create the parameter Dictionary<string, object> parameters = new Dictionary<string, object>(1); parameters.Add("itemData", itemData); // apply the XSLT transformation XmlTextReader xslReader = new XmlTextReader(new StringReader(xslt)); System.Xml.Xsl.XslCompiledTransform xsl = macro.CreateXsltTransform(xslReader, false); itemData = macro.GetXsltTransformResult(new XmlDocument(), xsl, parameters); xslReader.Close(); } return itemData; } protected string AddBeforeAfterText(string text, string before, string after) { if (!String.IsNullOrEmpty(text)) { if (!String.IsNullOrEmpty(before)) text = String.Format("{0}{1}", HttpContext.Current.Server.HtmlDecode(before), text); if (!String.IsNullOrEmpty(after)) text = String.Format("{0}{1}", text, HttpContext.Current.Server.HtmlDecode(after)); } return text; } /// <summary> /// Gets the text to display if the field contents are empty. /// </summary> /// <param name="item">The item.</param> /// <returns>The text to display.</returns> protected virtual string GetEmptyText(Item item) { return item.TextIfEmpty; } /// <summary> /// Gets the field content from database instead of the published XML via the APIs. 
/// </summary> /// <param name="itemAttributes"></param> /// <param name="nodeIdInt">The node id.</param> /// <param name="currentField">The field that should be fetched.</param> /// <returns>The contents of the <paramref name="currentField"/> from the <paramref name="nodeIdInt"/> content object</returns> [Obsolete("This is no longer used in the codebase and will be removed in future versions")] protected virtual string GetContentFromDatabase(AttributeCollectionAdapter itemAttributes, int nodeIdInt, string currentField) { var c = new Content(nodeIdInt); var property = c.getProperty(currentField); if (property == null) throw new ArgumentException(String.Format("Could not find property {0} of node {1}.", currentField, nodeIdInt)); var umbItem = new item(property.Value.ToString(), itemAttributes); var tempElementContent = umbItem.FieldContent; // If the current content object is a document object, we'll only output it if it's published if (c.nodeObjectType == Document._objectType) { try { var d = (Document)c; if (!d.Published) tempElementContent = ""; } catch { } } // Add the content to the cache if (!string.IsNullOrEmpty(tempElementContent)) { ApplicationContext.Current.ApplicationCache.InsertCacheItem( string.Format("{0}{1}_{2}", CacheKeys.ContentItemCacheKey, nodeIdInt, currentField), CacheItemPriority.Default, () => tempElementContent); } return tempElementContent; } /// <summary> /// Gets the content from cache. /// </summary> /// <param name="nodeIdInt">The node id.</param> /// <param name="field">The field.</param> /// <returns>The cached contents of the <paramref name="field"/> from the <paramref name="nodeIdInt"/> content object</returns> [Obsolete("This is no longer used in the codebase and will be removed in future versions")] protected virtual object GetContentFromCache(int nodeIdInt, string field) { var content = ApplicationContext.Current.ApplicationCache.GetCacheItem<object>( string.Format("{0}{1}_{2}", CacheKeys.ContentItemCacheKey, nodeIdInt, field)); return content; } } }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using Microsoft.Modeling; using Microsoft.Protocols.TestSuites.FileSharing.Common.Adapter; using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter; using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.CreditMgmt; using Microsoft.Protocols.TestTools.StackSdk.FileAccessService.Smb2; using Microsoft.Xrt.Runtime; [assembly: NativeType("System.Diagnostics.Tracing.*")] namespace Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Model.CreditMgmt { /// <summary> /// This models behavior of credit management for SMB2 server /// Assumptions/Restrictions: /// 1. Underlying connection supports multi-credit request (i.e. no NetBIOS transport used) /// 2. Async/Compounded/Cancel requests are not covered, will cover in traditional test /// 3. /// </summary> public static class CreditMgmtModel { #region State /// <summary> /// Server model state /// </summary> public static ModelState state = ModelState.Uninitialized; /// <summary> /// The dialect after negotiation /// </summary> public static DialectRevision negotiateDialect; /// <summary> /// Indicate if supports multi-credit request /// </summary> public static bool isMultiCreditSupported; /// <summary> /// Server configuration related to model /// </summary> public static CreditMgmtConfig config; /// <summary> /// Request that server model is handling /// </summary> public static ModelSMB2Request request; /// <summary> /// Assistant state. Indicate if the parameters result in server terminates connection /// </summary> public static bool expectDisconnection; /// <summary> /// Assistant state. Indicate if reaching an accepting state to make sure all ending states are what we expect /// </summary> public static bool acceptingCondition; #endregion #region Actions /// <summary> /// Call for loading server configuration /// </summary> [Rule(Action = "call ReadConfig(out _)")] public static void ReadConfigCall() { Condition.IsTrue(state == ModelState.Uninitialized); } /// <summary> /// Return for loading server configuration /// </summary> /// <param name="c">Server configuration related to model</param> [Rule(Action = "return ReadConfig(out c)")] public static void ReadConfigReturn(CreditMgmtConfig c) { Condition.IsTrue(state == ModelState.Uninitialized); Condition.IsNotNull(c); Condition.IsTrue( c.MaxSmbVersionSupported == ModelDialectRevision.Smb2002 || c.MaxSmbVersionSupported == ModelDialectRevision.Smb21 || c.MaxSmbVersionSupported == ModelDialectRevision.Smb30 || c.MaxSmbVersionSupported == ModelDialectRevision.Smb302); negotiateDialect = DialectRevision.Smb2Unknown; config = c; request = null; state = ModelState.Initialized; acceptingCondition = false; } /// <summary> /// Setup connection by perform following /// 1. Negotiate /// 2. SessionSetup /// 3. TreeConnect /// 4. 
Create /// </summary> /// <param name="clientMaxDialect">Max SMB2 dialect that client supports</param> [Rule] public static void SetupConnection(ModelDialectRevision clientMaxDialect) { Condition.IsTrue(state == ModelState.Initialized); Condition.IsNull(request); negotiateDialect = ModelHelper.DetermineNegotiateDialect(clientMaxDialect, config.MaxSmbVersionSupported); if ((negotiateDialect == DialectRevision.Smb21 || ModelUtility.IsSmb3xFamily(negotiateDialect)) && config.IsMultiCreditSupportedOnServer) { ModelHelper.Log( LogType.Requirement, "3.3.5.4: If the common dialect is SMB 2.1 or 3.x dialect family and the underlying connection is either TCP port 445 or RDMA," + "Connection.SupportsMultiCredit MUST be set to TRUE; otherwise, it MUST be set to FALSE."); ModelHelper.Log( LogType.TestInfo, "Common dialect is {0} and server implementation {1} multicredit", negotiateDialect, config.IsMultiCreditSupportedOnServer ? "supports" : "does not support"); isMultiCreditSupported = true; ModelHelper.Log( LogType.TestInfo, "Connection.SupportsMultiCredit is set to TRUE"); } else { isMultiCreditSupported = false; ModelHelper.Log( LogType.TestInfo, "Connection.SupportsMultiCredit is set to FALSE"); } state = ModelState.Connected; } /// <summary> /// Disconnect the connection on receiving fault request /// </summary> [Rule] public static void ExpectDisconnect() { Condition.IsTrue(state == ModelState.Connected); Condition.IsTrue(expectDisconnection); state = ModelState.Disconnected; acceptingCondition = true; } /// <summary> /// Request for credit operation /// </summary> /// <param name="midType">Message Id in the request</param> /// <param name="creditCharge">Credit charge in the request</param> /// <param name="creditRequestNum">Requested credit # in the request</param> /// <param name="payloadSize">Payload size in the request/response</param> /// <param name="payloadType">Payload type indicating if it's payload for request or response</param> [Rule] public static void CreditOperationRequest( ModelMidType midType, ModelCreditCharge creditCharge, ModelCreditRequestNum creditRequestNum, ModelPayloadSize payloadSize, ModelPayloadType payloadType) { Condition.IsTrue(state == ModelState.Connected); Condition.IsNull(request); Combination.Isolated(midType == ModelMidType.UsedMid); Combination.Isolated(midType == ModelMidType.UnavailableMid); Combination.Isolated(creditCharge == ModelCreditCharge.CreditChargeExceedBoundary); Combination.Isolated(payloadSize == ModelPayloadSize.PayloadSizeLargerThanBoundary); //Pairwise the rest parameters Combination.NWise(2, creditCharge, creditRequestNum, payloadSize, payloadType); request = new ModelCreditOperationRequest( midType, creditCharge, creditRequestNum, payloadSize, payloadType); // NOTE: creditCharge will be ignored if multicredit is not supported if (midType == ModelMidType.UsedMid || midType == ModelMidType.UnavailableMid || (isMultiCreditSupported && creditCharge == ModelCreditCharge.CreditChargeExceedBoundary)) { ModelHelper.Log( LogType.Requirement, "3.3.5.2.3: If the server determines that the MessageId or the range of MessageIds for the incoming request is not valid," + " the server SHOULD<202> terminate the connection. 
Otherwise, the server MUST remove the MessageId or the range of MessageIds from the Connection.CommandSequenceWindow."); //Not add Platform!=NonWindows because NonWindows could also drop connection if (midType == ModelMidType.UsedMid || midType == ModelMidType.UnavailableMid) { ModelHelper.Log(LogType.TestTag, TestTag.InvalidIdentifier); } if (isMultiCreditSupported && creditCharge == ModelCreditCharge.CreditChargeExceedBoundary) { ModelHelper.Log(LogType.TestTag, TestTag.OutOfBoundary); } expectDisconnection = true; ModelHelper.Log( LogType.TestInfo, "Test case is expecting server disconnect the connection"); ModelHelper.Log( LogType.TestInfo, "Connection.SupportsMultiCredit is set to {0}, messageId type is {1}, creditCharge type is {2}", isMultiCreditSupported, midType, creditCharge); return; } if (!isMultiCreditSupported && payloadSize == ModelPayloadSize.PayloadSizeLargerThanBoundary) { ModelHelper.Log( LogType.Requirement, "3.3.5.2: If Connection.SupportsMultiCredit is FALSE and the size of the request is greater than 68*1024 bytes," + " the server SHOULD<200> terminate the connection"); //Ignore following product behavior as known issue for now ModelHelper.Log( LogType.Requirement, "<200> Section 3.3.5.2: Windows 7 without [MSKB-2536275], and Windows Server 2008 R2 without [MSKB-2536275] terminate the connection when the size of the request is greater than 64*1024 bytes." + " Windows Vista SP1 and Windows Server 2008 on Direct TCP transport disconnect the connection if the size of the message exceeds 128*1024 bytes, and Windows Vista SP1 and Windows Server 2008 on NetBIOS over TCP transport will disconnect the connection if the size of the message exceeds 64*1024 bytes"); ModelHelper.Log(LogType.TestTag, TestTag.OutOfBoundary); expectDisconnection = true; ModelHelper.Log( LogType.TestInfo, "Test case is expecting server to drop the connection"); ModelHelper.Log( LogType.TestInfo, "Connection.SupportsMultiCredit is set to {0}, messageId type is {1}, creditCharge type is {2}", isMultiCreditSupported, midType, creditCharge); } } /// <summary> /// Response for credit operation /// </summary> /// <param name="status">Status in response</param> /// <param name="creditResponse">Credit granted by server</param> /// <param name="c">Server configurations</param> [Rule] public static void CreditOperationResponse(ModelSmb2Status status, uint creditResponse, CreditMgmtConfig c) { Condition.IsTrue(state == ModelState.Connected); Condition.IsTrue(c.Platform == config.Platform); ModelCreditOperationRequest creditOperationRequest = ModelHelper.RetrieveOutstandingRequest<ModelCreditOperationRequest>(ref request); if (config.Platform != Platform.NonWindows) { if (creditOperationRequest.creditRequestNum == ModelCreditRequestNum.CreditRequestSetNonZero) { ModelHelper.Log( LogType.Requirement, "3.3.1.2: The server SHOULD<151> grant the client a non-zero value of credits in response to any non-zero value requested"); ModelHelper.Log( LogType.TestInfo, "Platform is {0}", config.Platform); Condition.IsTrue(creditResponse != 0); } } if (creditOperationRequest.midType == ModelMidType.UsedMid || creditOperationRequest.midType == ModelMidType.UnavailableMid || (isMultiCreditSupported && creditOperationRequest.creditCharge == ModelCreditCharge.CreditChargeExceedBoundary)) { ModelHelper.Log( LogType.Requirement, "3.3.5.2.3: If the server determines that the MessageId or the range of MessageIds for the incoming request is not valid," + " the server SHOULD<202> terminate the connection. 
Otherwise, the server MUST remove the MessageId or the range of MessageIds from the Connection.CommandSequenceWindow."); if (creditOperationRequest.midType == ModelMidType.UsedMid || creditOperationRequest.midType == ModelMidType.UnavailableMid) { ModelHelper.Log(LogType.TestTag, TestTag.InvalidIdentifier); } if (isMultiCreditSupported && creditOperationRequest.creditCharge == ModelCreditCharge.CreditChargeExceedBoundary) { ModelHelper.Log(LogType.TestTag, TestTag.OutOfBoundary); } //Only NonWindows would run into this case when not following the SHOULD requirement Condition.IsTrue(config.Platform == Platform.NonWindows); Condition.IsTrue(status != ModelSmb2Status.STATUS_SUCCESS); acceptingCondition = true; return; } if (isMultiCreditSupported) { ModelHelper.Log( LogType.Requirement, "3.3.5.2.5: If Connection.SupportsMultiCredit is TRUE," + " the server MUST verify the CreditCharge field in the SMB2 header and the payload size (the size of the data within the variable-length field) of the request or the maximum response size"); ModelHelper.Log( LogType.TestInfo, "Connection.SupportsMultiCredit is TRUE"); if (creditOperationRequest.creditCharge == ModelCreditCharge.CreditChargeSetZero) { //NOTE: When multi-credit request is not supported or credit charge = 0 // Treat PayloadSize > 64K if use "LargerThanCreditCharge" if (creditOperationRequest.payloadSize == ModelPayloadSize.PayloadSizeLargerThanBoundary) { ModelHelper.Log( LogType.Requirement, "If CreditCharge is zero and the payload size of the request or the maximum response size is greater than 64 kilobytes," + " the server MUST fail the request with the error code STATUS_INVALID_PARAMETER."); ModelHelper.Log( LogType.TestInfo, "Credit charge type in request is {0}, payload size type is {1}", creditOperationRequest.creditCharge, creditOperationRequest.payloadSize); ModelHelper.Log(LogType.TestTag, TestTag.OutOfBoundary); Condition.IsTrue(status == ModelSmb2Status.STATUS_INVALID_PARAMETER); acceptingCondition = true; //Reaching an accepting condition of exploration return; } } else { if (creditOperationRequest.payloadSize == ModelPayloadSize.PayloadSizeLargerThanBoundary) { ModelHelper.Log( LogType.Requirement, "If CreditCharge is greater than zero, the server MUST calculate the expected CreditCharge for the current operation using the formula specified in section 3.1.5.2." + " If the calculated credit number is greater than the CreditCharge, the server MUST fail the request with the error code STATUS_INVALID_PARAMETER."); ModelHelper.Log( LogType.TestInfo, "Credit charge type in request is {0}, payload size type is {1}," + " that's calculated credit number based on payload size is greater than the CreditCharge", creditOperationRequest.creditCharge, creditOperationRequest.payloadSize); ModelHelper.Log(LogType.TestTag, TestTag.OutOfBoundary); Condition.IsTrue(status == ModelSmb2Status.STATUS_INVALID_PARAMETER); acceptingCondition = true; //Reaching an accepting condition of exploration return; } } } Condition.IsTrue(status == ModelSmb2Status.STATUS_SUCCESS); acceptingCondition = true; } [AcceptingStateCondition] public static bool AcceptingCondtion() { return acceptingCondition; } #endregion } }
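// ---------------------------------------------------------------------------------
// Illustrative sketch only; not part of the generated model above. The requirements
// quoted in CreditOperationRequest/CreditOperationResponse refer to the expected
// CreditCharge formula of [MS-SMB2] section 3.1.5.2 and to the 64 KiB boundary that
// applies when CreditCharge is zero. The helper below shows one way those checks
// could be written, assuming the usual reading of the formula as
// (max(SendPayloadSize, ExpectedResponsePayloadSize) - 1) / 65536 + 1. The class and
// method names are hypothetical and exist only for this sketch.
// ---------------------------------------------------------------------------------
internal static class CreditChargeSketch
{
    // Expected CreditCharge for a request, per the [MS-SMB2] 3.1.5.2 formula
    // referenced by the requirements quoted above.
    public static int ExpectedCreditCharge(int sendPayloadSize, int maxResponsePayloadSize)
    {
        int payload = System.Math.Max(sendPayloadSize, maxResponsePayloadSize);
        return payload <= 0 ? 1 : ((payload - 1) / 65536) + 1;
    }

    // Mirrors the 3.3.5.2.5 validation exercised by the model: with CreditCharge == 0
    // the payload must not exceed 64 KiB; otherwise the computed expected charge must
    // not exceed the CreditCharge carried in the SMB2 header.
    public static bool PayloadWithinCreditCharge(int creditCharge, int sendPayloadSize, int maxResponsePayloadSize)
    {
        if (creditCharge == 0)
        {
            return System.Math.Max(sendPayloadSize, maxResponsePayloadSize) <= 64 * 1024;
        }
        return ExpectedCreditCharge(sendPayloadSize, maxResponsePayloadSize) <= creditCharge;
    }
}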
/* * (c) 2008 MOSA - The Managed Operating System Alliance * * Licensed under the terms of the New BSD License. * * Authors: * Simon Wollwage (rootnode) <[email protected]> */ #if false using System; using AGG.PixelFormat; namespace AGG { /* //========================================================line_image_scale public class line_image_scale { IPixelFormat m_source; double m_height; double m_scale; public line_image_scale(IPixelFormat src, double height) { m_source = (src); m_height = (height); m_scale = (src.height() / height); } public double width() { return m_source.width(); } public double height() { return m_height; } public RGBA_Bytes pixel(int x, int y) { double src_y = (y + 0.5) * m_scale - 0.5; int h = m_source.height() - 1; int y1 = ufloor(src_y); int y2 = y1 + 1; RGBA_Bytes pix1 = (y1 < 0) ? new no_color() : m_source.pixel(x, y1); RGBA_Bytes pix2 = (y2 > h) ? no_color() : m_source.pixel(x, y2); return pix1.gradient(pix2, src_y - y1); } }; */ //======================================================line_image_pattern public class line_image_pattern { IPatternFilter m_filter; int m_dilation; int m_dilation_hr; RasterBuffer m_buf; IntPtr m_data = (IntPtr)0; int m_DataSizeInBytes = 0; int m_width; int m_height; int m_width_hr; int m_half_height_hr; int m_offset_y_hr; //-------------------------------------------------------------------- public line_image_pattern(IPatternFilter filter) { m_filter=filter; m_dilation=(filter.dilation() + 1); m_dilation_hr=(m_dilation << LineAABasics.line_subpixel_shift); m_width=(0); m_height=(0); m_width_hr=(0); m_half_height_hr=(0); m_offset_y_hr=(0); } ~line_image_pattern() { if(m_DataSizeInBytes > 0) { System.Runtime.InteropServices.Marshal.FreeHGlobal(m_data); } } // Create //-------------------------------------------------------------------- public line_image_pattern(IPatternFilter filter, RasterBuffer src) { m_filter=(filter); m_dilation=(filter.dilation() + 1); m_dilation_hr=(m_dilation << LineAABasics.line_subpixel_shift); m_width=0; m_height=0; m_width_hr=0; m_half_height_hr=0; m_offset_y_hr=(0); m_buf = src; create(src); } // Create //-------------------------------------------------------------------- public void create(RasterBuffer src) { m_height = (int)agg_basics.uceil(src.Height()); m_width = (int)agg_basics.uceil(src.Width()); m_width_hr = (int)agg_basics.uround(src.Width() * LineAABasics.line_subpixel_scale); m_half_height_hr = (int)agg_basics.uround(src.Height() * LineAABasics.line_subpixel_scale / 2); m_offset_y_hr = m_dilation_hr + m_half_height_hr - LineAABasics.line_subpixel_scale / 2; m_half_height_hr += LineAABasics.line_subpixel_scale / 2; int NewSizeInBytes = (m_width + m_dilation * 2) * (m_height + m_dilation * 2); if (m_DataSizeInBytes < NewSizeInBytes) { if (m_data != null) { System.Runtime.InteropServices.Marshal.FreeHGlobal(m_data); } m_DataSizeInBytes = NewSizeInBytes; m_data = System.Runtime.InteropServices.Marshal.AllocHGlobal(m_DataSizeInBytes); } unsafe { m_buf.attach((byte*)m_data, (uint)(m_width + m_dilation * 2), (uint)(m_height + m_dilation * 2), (int)(m_width + m_dilation * 2), 32); int x, y; RGBA_Bytes* d1; RGBA_Bytes* d2; for (y = 0; y < m_height; y++) { d1 = (RGBA_Bytes*)m_buf.GetPixelPointer(y + m_dilation) + m_dilation; for (x = 0; x < m_width; x++) { *d1++ = *(RGBA_Bytes*)src.GetPixelPointer(x, y); } } RGBA_Bytes* s1; RGBA_Bytes* s2; RGBA_Bytes noColor = (RGBA_Bytes)RGBA_Bytes.no_color(); for (y = 0; y < m_dilation; y++) { //s1 = m_buf.GetPixelPointer(m_height + m_dilation - 1) + m_dilation; //s2 = 
m_buf.GetPixelPointer(m_dilation) + m_dilation; d1 = (RGBA_Bytes*)m_buf.GetPixelPointer(m_dilation + m_height + y) + m_dilation; d2 = (RGBA_Bytes*)m_buf.GetPixelPointer(m_dilation - y - 1) + m_dilation; for (x = 0; x < m_width; x++) { //*d1++ = RGBA_Bytes(*s1++, 0); //*d2++ = RGBA_Bytes(*s2++, 0); *d1++ = noColor; *d2++ = noColor; } } int h = m_height + m_dilation * 2; for (y = 0; y < h; y++) { s1 = (RGBA_Bytes*)m_buf.GetPixelPointer(y) + m_dilation; s2 = (RGBA_Bytes*)m_buf.GetPixelPointer(y) + m_dilation + m_width; d1 = (RGBA_Bytes*)m_buf.GetPixelPointer(y) + m_dilation + m_width; d2 = (RGBA_Bytes*)m_buf.GetPixelPointer(y) + m_dilation; for (x = 0; x < m_dilation; x++) { *d1++ = *s1++; *--d2 = *--s2; } } } } //-------------------------------------------------------------------- public int pattern_width() { return m_width_hr; } public int line_width() { return m_half_height_hr; } public double width() { return m_height; } //-------------------------------------------------------------------- public unsafe void pixel(RGBA_Bytes* p, int x, int y) { m_filter.pixel_high_res(m_buf, p, x % m_width_hr + m_dilation_hr, y + m_offset_y_hr); } //-------------------------------------------------------------------- public IPatternFilter filter() { return m_filter; } }; /* //=================================================line_image_pattern_pow2 public class line_image_pattern_pow2 : line_image_pattern<IPatternFilter> { uint m_mask; //-------------------------------------------------------------------- public line_image_pattern_pow2(IPatternFilter filter) : line_image_pattern<IPatternFilter>(filter), m_mask(line_subpixel_mask) {} //-------------------------------------------------------------------- public line_image_pattern_pow2(IPatternFilter filter, RasterBuffer src) : line_image_pattern<IPatternFilter>(filter), m_mask(line_subpixel_mask) { create(src); } //-------------------------------------------------------------------- public void create(RasterBuffer src) { line_image_pattern<IPatternFilter>::create(src); m_mask = 1; while(m_mask < base_type::m_width) { m_mask <<= 1; m_mask |= 1; } m_mask <<= line_subpixel_shift - 1; m_mask |= line_subpixel_mask; base_type::m_width_hr = m_mask + 1; } //-------------------------------------------------------------------- public void pixel(RGBA_Bytes* p, int x, int y) { base_type::m_filter->pixel_high_res( base_type::m_buf.rows(), p, (x & m_mask) + base_type::m_dilation_hr, y + base_type::m_offset_y_hr); } }; */ //===================================================distance_interpolator4 public class distance_interpolator4 { int m_dx; int m_dy; int m_dx_start; int m_dy_start; int m_dx_pict; int m_dy_pict; int m_dx_end; int m_dy_end; int m_dist; int m_dist_start; int m_dist_pict; int m_dist_end; int m_len; //--------------------------------------------------------------------- public distance_interpolator4() {} public distance_interpolator4(int x1, int y1, int x2, int y2, int sx, int sy, int ex, int ey, int len, double scale, int x, int y) { m_dx=(x2 - x1); m_dy=(y2 - y1); m_dx_start = (LineAABasics.line_mr(sx) - LineAABasics.line_mr(x1)); m_dy_start = (LineAABasics.line_mr(sy) - LineAABasics.line_mr(y1)); m_dx_end = (LineAABasics.line_mr(ex) - LineAABasics.line_mr(x2)); m_dy_end = (LineAABasics.line_mr(ey) - LineAABasics.line_mr(y2)); m_dist = (agg_basics.iround((double)(x + LineAABasics.line_subpixel_scale / 2 - x2) * (double)(m_dy) - (double)(y + LineAABasics.line_subpixel_scale / 2 - y2) * (double)(m_dx))); m_dist_start = ((LineAABasics.line_mr(x + 
LineAABasics.line_subpixel_scale / 2) - LineAABasics.line_mr(sx)) * m_dy_start - (LineAABasics.line_mr(y + LineAABasics.line_subpixel_scale / 2) - LineAABasics.line_mr(sy)) * m_dx_start); m_dist_end = ((LineAABasics.line_mr(x + LineAABasics.line_subpixel_scale / 2) - LineAABasics.line_mr(ex)) * m_dy_end - (LineAABasics.line_mr(y + LineAABasics.line_subpixel_scale / 2) - LineAABasics.line_mr(ey)) * m_dx_end); m_len=(int)(agg_basics.uround(len / scale)); double d = len * scale; int dx = agg_basics.iround(((x2 - x1) << LineAABasics.line_subpixel_shift) / d); int dy = agg_basics.iround(((y2 - y1) << LineAABasics.line_subpixel_shift) / d); m_dx_pict = -dy; m_dy_pict = dx; m_dist_pict = ((x + LineAABasics.line_subpixel_scale / 2 - (x1 - dy)) * m_dy_pict - (y + LineAABasics.line_subpixel_scale / 2 - (y1 + dx)) * m_dx_pict) >> LineAABasics.line_subpixel_shift; m_dx <<= LineAABasics.line_subpixel_shift; m_dy <<= LineAABasics.line_subpixel_shift; m_dx_start <<= LineAABasics.line_mr_subpixel_shift; m_dy_start <<= LineAABasics.line_mr_subpixel_shift; m_dx_end <<= LineAABasics.line_mr_subpixel_shift; m_dy_end <<= LineAABasics.line_mr_subpixel_shift; } //--------------------------------------------------------------------- public void inc_x() { m_dist += m_dy; m_dist_start += m_dy_start; m_dist_pict += m_dy_pict; m_dist_end += m_dy_end; } //--------------------------------------------------------------------- public void dec_x() { m_dist -= m_dy; m_dist_start -= m_dy_start; m_dist_pict -= m_dy_pict; m_dist_end -= m_dy_end; } //--------------------------------------------------------------------- public void inc_y() { m_dist -= m_dx; m_dist_start -= m_dx_start; m_dist_pict -= m_dx_pict; m_dist_end -= m_dx_end; } //--------------------------------------------------------------------- public void dec_y() { m_dist += m_dx; m_dist_start += m_dx_start; m_dist_pict += m_dx_pict; m_dist_end += m_dx_end; } //--------------------------------------------------------------------- public void inc_x(int dy) { m_dist += m_dy; m_dist_start += m_dy_start; m_dist_pict += m_dy_pict; m_dist_end += m_dy_end; if(dy > 0) { m_dist -= m_dx; m_dist_start -= m_dx_start; m_dist_pict -= m_dx_pict; m_dist_end -= m_dx_end; } if(dy < 0) { m_dist += m_dx; m_dist_start += m_dx_start; m_dist_pict += m_dx_pict; m_dist_end += m_dx_end; } } //--------------------------------------------------------------------- public void dec_x(int dy) { m_dist -= m_dy; m_dist_start -= m_dy_start; m_dist_pict -= m_dy_pict; m_dist_end -= m_dy_end; if(dy > 0) { m_dist -= m_dx; m_dist_start -= m_dx_start; m_dist_pict -= m_dx_pict; m_dist_end -= m_dx_end; } if(dy < 0) { m_dist += m_dx; m_dist_start += m_dx_start; m_dist_pict += m_dx_pict; m_dist_end += m_dx_end; } } //--------------------------------------------------------------------- public void inc_y(int dx) { m_dist -= m_dx; m_dist_start -= m_dx_start; m_dist_pict -= m_dx_pict; m_dist_end -= m_dx_end; if(dx > 0) { m_dist += m_dy; m_dist_start += m_dy_start; m_dist_pict += m_dy_pict; m_dist_end += m_dy_end; } if(dx < 0) { m_dist -= m_dy; m_dist_start -= m_dy_start; m_dist_pict -= m_dy_pict; m_dist_end -= m_dy_end; } } //--------------------------------------------------------------------- public void dec_y(int dx) { m_dist += m_dx; m_dist_start += m_dx_start; m_dist_pict += m_dx_pict; m_dist_end += m_dx_end; if(dx > 0) { m_dist += m_dy; m_dist_start += m_dy_start; m_dist_pict += m_dy_pict; m_dist_end += m_dy_end; } if(dx < 0) { m_dist -= m_dy; m_dist_start -= m_dy_start; m_dist_pict -= m_dy_pict; 
m_dist_end -= m_dy_end; } } //--------------------------------------------------------------------- public int dist() { return m_dist; } public int dist_start() { return m_dist_start; } public int dist_pict() { return m_dist_pict; } public int dist_end() { return m_dist_end; } //--------------------------------------------------------------------- public int dx() { return m_dx; } public int dy() { return m_dy; } public int dx_start() { return m_dx_start; } public int dy_start() { return m_dy_start; } public int dx_pict() { return m_dx_pict; } public int dy_pict() { return m_dy_pict; } public int dx_end() { return m_dx_end; } public int dy_end() { return m_dy_end; } public int len() { return m_len; } }; //==================================================line_interpolator_image public class line_interpolator_image { line_parameters m_lp; dda2_line_interpolator m_li; distance_interpolator4 m_di; IPixelFormat m_ren; int m_plen; int m_x; int m_y; int m_old_x; int m_old_y; int m_count; int m_width; int m_max_extent; int m_start; int m_step; int[] m_dist_pos = new int[max_half_width + 1]; RGBA_Bytes[] m_colors = new RGBA_Bytes[max_half_width * 2 + 4]; //--------------------------------------------------------------------- public const int max_half_width = 64; //--------------------------------------------------------------------- public line_interpolator_image(IPixelFormat ren, line_parameters lp, int sx, int sy, int ex, int ey, int pattern_start, double scale_x) { m_lp=(lp); m_li = new dda2_line_interpolator(lp.vertical ? line_dbl_hr(lp.x2 - lp.x1) : line_dbl_hr(lp.y2 - lp.y1), lp.vertical ? abs(lp.y2 - lp.y1) : abs(lp.x2 - lp.x1) + 1); m_di = new distance_interpolator4(lp.x1, lp.y1, lp.x2, lp.y2, sx, sy, ex, ey, lp.len, scale_x, lp.x1 & ~line_subpixel_mask, lp.y1 & ~line_subpixel_mask); m_ren=(ren); m_x=(lp.x1 >> line_subpixel_shift); m_y=(lp.y1 >> line_subpixel_shift); m_old_x=(m_x); m_old_y=(m_y); m_count=((lp.vertical ? abs((lp.y2 >> line_subpixel_shift) - m_y) : abs((lp.x2 >> line_subpixel_shift) - m_x))); m_width=(ren.subpixel_width()); //m_max_extent(m_width >> (line_subpixel_shift - 2)); m_max_extent=((m_width + LineAABasics.line_subpixel_scale) >> line_subpixel_shift); m_start=(pattern_start + (m_max_extent + 2) * ren.pattern_width()); m_step=(0); dda2_line_interpolator li = new dda2_line_interpolator(0, lp.vertical ? 
(lp.dy << LineAABasics.line_subpixel_shift) : (lp.dx << LineAABasics.line_subpixel_shift), lp.len); uint i; int stop = m_width + LineAABasics.line_subpixel_scale * 2; for(i = 0; i < max_half_width; ++i) { m_dist_pos[i] = li.y(); if(m_dist_pos[i] >= stop) break; ++li; } m_dist_pos[i] = 0x7FFF0000; int dist1_start; int dist2_start; int npix = 1; if(lp.vertical) { do { --m_li; m_y -= lp.inc; m_x = (m_lp.x1 + m_li.y()) >> line_subpixel_shift; if(lp.inc > 0) m_di.dec_y(m_x - m_old_x); else m_di.inc_y(m_x - m_old_x); m_old_x = m_x; dist1_start = dist2_start = m_di.dist_start(); int dx = 0; if(dist1_start < 0) ++npix; do { dist1_start += m_di.dy_start(); dist2_start -= m_di.dy_start(); if(dist1_start < 0) ++npix; if(dist2_start < 0) ++npix; ++dx; } while(m_dist_pos[dx] <= m_width); if(npix == 0) break; npix = 0; } while(--m_step >= -m_max_extent); } else { do { --m_li; m_x -= lp.inc; m_y = (m_lp.y1 + m_li.y()) >> line_subpixel_shift; if(lp.inc > 0) m_di.dec_x(m_y - m_old_y); else m_di.inc_x(m_y - m_old_y); m_old_y = m_y; dist1_start = dist2_start = m_di.dist_start(); int dy = 0; if(dist1_start < 0) ++npix; do { dist1_start -= m_di.dx_start(); dist2_start += m_di.dx_start(); if(dist1_start < 0) ++npix; if(dist2_start < 0) ++npix; ++dy; } while(m_dist_pos[dy] <= m_width); if(npix == 0) break; npix = 0; } while(--m_step >= -m_max_extent); } m_li.adjust_forward(); m_step -= m_max_extent; } //--------------------------------------------------------------------- public bool step_hor() { ++m_li; m_x += m_lp.inc; m_y = (m_lp.y1 + m_li.y()) >> line_subpixel_shift; if(m_lp.inc > 0) m_di.inc_x(m_y - m_old_y); else m_di.dec_x(m_y - m_old_y); m_old_y = m_y; int s1 = m_di.dist() / m_lp.len; int s2 = -s1; if(m_lp.inc < 0) s1 = -s1; int dist_start; int dist_pict; int dist_end; int dy; int dist; dist_start = m_di.dist_start(); dist_pict = m_di.dist_pict() + m_start; dist_end = m_di.dist_end(); RGBA_Bytes* p0 = m_colors + max_half_width + 2; RGBA_Bytes* p1 = p0; int npix = 0; p1->clear(); if(dist_end > 0) { if(dist_start <= 0) { m_ren.pixel(p1, dist_pict, s2); } ++npix; } ++p1; dy = 1; while((dist = m_dist_pos[dy]) - s1 <= m_width) { dist_start -= m_di.dx_start(); dist_pict -= m_di.dx_pict(); dist_end -= m_di.dx_end(); p1->clear(); if(dist_end > 0 && dist_start <= 0) { if(m_lp.inc > 0) dist = -dist; m_ren.pixel(p1, dist_pict, s2 - dist); ++npix; } ++p1; ++dy; } dy = 1; dist_start = m_di.dist_start(); dist_pict = m_di.dist_pict() + m_start; dist_end = m_di.dist_end(); while((dist = m_dist_pos[dy]) + s1 <= m_width) { dist_start += m_di.dx_start(); dist_pict += m_di.dx_pict(); dist_end += m_di.dx_end(); --p0; p0->clear(); if(dist_end > 0 && dist_start <= 0) { if(m_lp.inc > 0) dist = -dist; m_ren.pixel(p0, dist_pict, s2 + dist); ++npix; } ++dy; } m_ren.blend_color_vspan(m_x, m_y - dy + 1, (uint)(p1 - p0), p0); return npix && ++m_step < m_count; } //--------------------------------------------------------------------- public bool step_ver() { ++m_li; m_y += m_lp.inc; m_x = (m_lp.x1 + m_li.y()) >> line_subpixel_shift; if(m_lp.inc > 0) m_di.inc_y(m_x - m_old_x); else m_di.dec_y(m_x - m_old_x); m_old_x = m_x; int s1 = m_di.dist() / m_lp.len; int s2 = -s1; if(m_lp.inc > 0) s1 = -s1; int dist_start; int dist_pict; int dist_end; int dist; int dx; dist_start = m_di.dist_start(); dist_pict = m_di.dist_pict() + m_start; dist_end = m_di.dist_end(); RGBA_Bytes* p0 = m_colors + max_half_width + 2; RGBA_Bytes* p1 = p0; int npix = 0; p1->clear(); if(dist_end > 0) { if(dist_start <= 0) { m_ren.pixel(p1, dist_pict, s2); } ++npix; } 
++p1; dx = 1; while((dist = m_dist_pos[dx]) - s1 <= m_width) { dist_start += m_di.dy_start(); dist_pict += m_di.dy_pict(); dist_end += m_di.dy_end(); p1->clear(); if(dist_end > 0 && dist_start <= 0) { if(m_lp.inc > 0) dist = -dist; m_ren.pixel(p1, dist_pict, s2 + dist); ++npix; } ++p1; ++dx; } dx = 1; dist_start = m_di.dist_start(); dist_pict = m_di.dist_pict() + m_start; dist_end = m_di.dist_end(); while((dist = m_dist_pos[dx]) + s1 <= m_width) { dist_start -= m_di.dy_start(); dist_pict -= m_di.dy_pict(); dist_end -= m_di.dy_end(); --p0; p0->clear(); if(dist_end > 0 && dist_start <= 0) { if(m_lp.inc > 0) dist = -dist; m_ren.pixel(p0, dist_pict, s2 - dist); ++npix; } ++dx; } m_ren.blend_color_hspan(m_x - dx + 1, m_y, (uint)(p1 - p0), p0); return npix && ++m_step < m_count; } //--------------------------------------------------------------------- public int pattern_end() { return m_start + m_di.len(); } //--------------------------------------------------------------------- public bool vertical() { return m_lp.vertical; } public int width() { return m_width; } public int count() { return m_count; } }; //===================================================renderer_outline_image //template<class BaseRenderer, class ImagePattern> public class renderer_outline_image { IPixelFormat m_ren; line_image_pattern m_pattern; int m_start; double m_scale_x; rect_i m_clip_box; bool m_clipping; //--------------------------------------------------------------------- //typedef renderer_outline_image<BaseRenderer, ImagePattern> self_type; //--------------------------------------------------------------------- public renderer_outline_image(IPixelFormat ren, line_image_pattern patt) { m_ren=ren; m_pattern=patt; m_start=(0); m_scale_x=(1.0); m_clip_box=new rect_i(0,0,0,0); m_clipping=(false); } public void attach(IPixelFormat ren) { m_ren = ren; } //--------------------------------------------------------------------- public void pattern(line_image_pattern p) { m_pattern = p; } public line_image_pattern pattern() { return m_pattern; } //--------------------------------------------------------------------- public void reset_clipping() { m_clipping = false; } public void clip_box(double x1, double y1, double x2, double y2) { m_clip_box.x1 = line_coord_sat.conv(x1); m_clip_box.y1 = line_coord_sat.conv(y1); m_clip_box.x2 = line_coord_sat.conv(x2); m_clip_box.y2 = line_coord_sat.conv(y2); m_clipping = true; } //--------------------------------------------------------------------- public void scale_x(double s) { m_scale_x = s; } public double scale_x() { return m_scale_x; } //--------------------------------------------------------------------- public void start_x(double s) { m_start = agg_basics.iround(s * LineAABasics.line_subpixel_scale); } public double start_x() { return (double)(m_start) / LineAABasics.line_subpixel_scale; } //--------------------------------------------------------------------- public int subpixel_width() { return m_pattern.line_width(); } public int pattern_width() { return m_pattern.pattern_width(); } public double width() { return (double)(subpixel_width()) / LineAABasics.line_subpixel_scale; } //------------------------------------------------------------------------- public unsafe void pixel(RGBA_Bytes* p, int x, int y) { m_pattern.pixel(p, x, y); } //------------------------------------------------------------------------- public unsafe void blend_color_hspan(int x, int y, uint len, RGBA_Bytes* colors) { m_ren.blend_color_hspan(x, y, len, colors, null, 0); } 
//------------------------------------------------------------------------- public unsafe void blend_color_vspan(int x, int y, uint len, RGBA_Bytes* colors) { m_ren.blend_color_vspan(x, y, len, colors, null, 0); } //------------------------------------------------------------------------- public static bool accurate_join_only() { return true; } /* //------------------------------------------------------------------------- public void semidot(Cmp, int, int, int, int) { } //------------------------------------------------------------------------- public void pie(int, int, int, int, int, int) { } //------------------------------------------------------------------------- public void line0(line_parameters) { } //------------------------------------------------------------------------- public void line1(line_parameters, int, int) { } //------------------------------------------------------------------------- public void line2(line_parameters, int, int) { } */ //------------------------------------------------------------------------- public void line3_no_clip(line_parameters lp, int sx, int sy, int ex, int ey) { if(lp.len > LineAABasics.line_max_length) { line_parameters lp1, lp2; lp.divide(lp1, lp2); int mx = lp1.x2 + (lp1.y2 - lp1.y1); int my = lp1.y2 - (lp1.x2 - lp1.x1); line3_no_clip(lp1, (lp.x1 + sx) >> 1, (lp.y1 + sy) >> 1, mx, my); line3_no_clip(lp2, mx, my, (lp.x2 + ex) >> 1, (lp.y2 + ey) >> 1); return; } LineAABasics.fix_degenerate_bisectrix_start(lp, ref sx, ref sy); LineAABasics.fix_degenerate_bisectrix_end(lp, ref ex, ref ey); line_interpolator_image li = new line_interpolator_image(*this, lp, sx, sy, ex, ey, m_start, m_scale_x); if(li.vertical()) { while(li.step_ver()); } else { while(li.step_hor()); } m_start += uround(lp.len / m_scale_x); } //------------------------------------------------------------------------- public void line3(line_parameters lp, int sx, int sy, int ex, int ey) { if(m_clipping) { int x1 = lp.x1; int y1 = lp.y1; int x2 = lp.x2; int y2 = lp.y2; uint flags = clip_line_segment(&x1, &y1, &x2, &y2, m_clip_box); int start = m_start; if((flags & 4) == 0) { if(flags) { line_parameters lp2(x1, y1, x2, y2, uround(calc_distance(x1, y1, x2, y2))); if(flags & 1) { m_start += uround(calc_distance(lp.x1, lp.y1, x1, y1) / m_scale_x); sx = x1 + (y2 - y1); sy = y1 - (x2 - x1); } else { while(abs(sx - lp.x1) + abs(sy - lp.y1) > lp2.len) { sx = (lp.x1 + sx) >> 1; sy = (lp.y1 + sy) >> 1; } } if(flags & 2) { ex = x2 + (y2 - y1); ey = y2 - (x2 - x1); } else { while(abs(ex - lp.x2) + abs(ey - lp.y2) > lp2.len) { ex = (lp.x2 + ex) >> 1; ey = (lp.y2 + ey) >> 1; } } line3_no_clip(lp2, sx, sy, ex, ey); } else { line3_no_clip(lp, sx, sy, ex, ey); } } m_start = start + uround(lp.len / m_scale_x); } else { line3_no_clip(lp, sx, sy, ex, ey); } } }; } #endif
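// ---------------------------------------------------------------------------------
// Illustrative sketch only; the AGG port above is excluded from compilation by
// "#if false" and nothing here changes that. distance_interpolator4 keeps cross
// products of the form (x - x2)*dy - (y - y2)*dx up to date incrementally: a unit
// step in x changes such a value by +dy and a unit step in y changes it by -dx,
// which is what inc_x/dec_x/inc_y/dec_y implement (before the subpixel shifts are
// applied). The class below is invented purely to demonstrate that identity.
// ---------------------------------------------------------------------------------
internal static class DistanceInterpolatorSketch
{
    // Cross product used as a signed, length-scaled distance of (x, y) from the
    // line through (x1, y1) and (x2, y2).
    public static long Cross(long x1, long y1, long x2, long y2, long x, long y)
    {
        return (x - x2) * (y2 - y1) - (y - y2) * (x2 - x1);
    }

    public static void Demo()
    {
        long x1 = 0, y1 = 0, x2 = 30, y2 = 10;   // arbitrary line
        long x = 7, y = 3;                        // arbitrary sample point
        long d = Cross(x1, y1, x2, y2, x, y);
        long dx = x2 - x1;
        long dy = y2 - y1;
        // Incremental updates agree with full recomputation, as in inc_x()/inc_y().
        System.Diagnostics.Debug.Assert(Cross(x1, y1, x2, y2, x + 1, y) == d + dy);
        System.Diagnostics.Debug.Assert(Cross(x1, y1, x2, y2, x, y + 1) == d - dx);
    }
}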
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gagvr = Google.Ads.GoogleAds.V8.Resources; using gax = Google.Api.Gax; using gaxgrpc = Google.Api.Gax.Grpc; using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore; using proto = Google.Protobuf; using grpccore = Grpc.Core; using grpcinter = Grpc.Core.Interceptors; using sys = System; using scg = System.Collections.Generic; using sco = System.Collections.ObjectModel; using st = System.Threading; using stt = System.Threading.Tasks; namespace Google.Ads.GoogleAds.V8.Services { /// <summary>Settings for <see cref="CustomerFeedServiceClient"/> instances.</summary> public sealed partial class CustomerFeedServiceSettings : gaxgrpc::ServiceSettingsBase { /// <summary>Get a new instance of the default <see cref="CustomerFeedServiceSettings"/>.</summary> /// <returns>A new instance of the default <see cref="CustomerFeedServiceSettings"/>.</returns> public static CustomerFeedServiceSettings GetDefault() => new CustomerFeedServiceSettings(); /// <summary>Constructs a new <see cref="CustomerFeedServiceSettings"/> object with default settings.</summary> public CustomerFeedServiceSettings() { } private CustomerFeedServiceSettings(CustomerFeedServiceSettings existing) : base(existing) { gax::GaxPreconditions.CheckNotNull(existing, nameof(existing)); GetCustomerFeedSettings = existing.GetCustomerFeedSettings; MutateCustomerFeedsSettings = existing.MutateCustomerFeedsSettings; OnCopy(existing); } partial void OnCopy(CustomerFeedServiceSettings existing); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>CustomerFeedServiceClient.GetCustomerFeed</c> and <c>CustomerFeedServiceClient.GetCustomerFeedAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 5000 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>, /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>. 
/// </description> /// </item> /// <item><description>Timeout: 3600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings GetCustomerFeedSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded))); /// <summary> /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to /// <c>CustomerFeedServiceClient.MutateCustomerFeeds</c> and /// <c>CustomerFeedServiceClient.MutateCustomerFeedsAsync</c>. /// </summary> /// <remarks> /// <list type="bullet"> /// <item><description>Initial retry delay: 5000 milliseconds.</description></item> /// <item><description>Retry delay multiplier: 1.3</description></item> /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item> /// <item><description>Maximum attempts: Unlimited</description></item> /// <item> /// <description> /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>, /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>. /// </description> /// </item> /// <item><description>Timeout: 3600 seconds.</description></item> /// </list> /// </remarks> public gaxgrpc::CallSettings MutateCustomerFeedsSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded))); /// <summary>Creates a deep clone of this object, with all the same property values.</summary> /// <returns>A deep clone of this <see cref="CustomerFeedServiceSettings"/> object.</returns> public CustomerFeedServiceSettings Clone() => new CustomerFeedServiceSettings(this); } /// <summary> /// Builder class for <see cref="CustomerFeedServiceClient"/> to provide simple configuration of credentials, /// endpoint etc. /// </summary> internal sealed partial class CustomerFeedServiceClientBuilder : gaxgrpc::ClientBuilderBase<CustomerFeedServiceClient> { /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary> public CustomerFeedServiceSettings Settings { get; set; } /// <summary>Creates a new builder with default settings.</summary> public CustomerFeedServiceClientBuilder() { UseJwtAccessWithScopes = CustomerFeedServiceClient.UseJwtAccessWithScopes; } partial void InterceptBuild(ref CustomerFeedServiceClient client); partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<CustomerFeedServiceClient> task); /// <summary>Builds the resulting client.</summary> public override CustomerFeedServiceClient Build() { CustomerFeedServiceClient client = null; InterceptBuild(ref client); return client ?? 
BuildImpl(); } /// <summary>Builds the resulting client asynchronously.</summary> public override stt::Task<CustomerFeedServiceClient> BuildAsync(st::CancellationToken cancellationToken = default) { stt::Task<CustomerFeedServiceClient> task = null; InterceptBuildAsync(cancellationToken, ref task); return task ?? BuildAsyncImpl(cancellationToken); } private CustomerFeedServiceClient BuildImpl() { Validate(); grpccore::CallInvoker callInvoker = CreateCallInvoker(); return CustomerFeedServiceClient.Create(callInvoker, Settings); } private async stt::Task<CustomerFeedServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken) { Validate(); grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false); return CustomerFeedServiceClient.Create(callInvoker, Settings); } /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary> protected override string GetDefaultEndpoint() => CustomerFeedServiceClient.DefaultEndpoint; /// <summary> /// Returns the default scopes for this builder type, used if no scopes are otherwise specified. /// </summary> protected override scg::IReadOnlyList<string> GetDefaultScopes() => CustomerFeedServiceClient.DefaultScopes; /// <summary>Returns the channel pool to use when no other options are specified.</summary> protected override gaxgrpc::ChannelPool GetChannelPool() => CustomerFeedServiceClient.ChannelPool; /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary> protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance; } /// <summary>CustomerFeedService client wrapper, for convenient use.</summary> /// <remarks> /// Service to manage customer feeds. /// </remarks> public abstract partial class CustomerFeedServiceClient { /// <summary> /// The default endpoint for the CustomerFeedService service, which is a host of "googleads.googleapis.com" and /// a port of 443. /// </summary> public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443"; /// <summary>The default CustomerFeedService scopes.</summary> /// <remarks> /// The default CustomerFeedService scopes are: /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list> /// </remarks> public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] { "https://www.googleapis.com/auth/adwords", }); internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes); internal static bool UseJwtAccessWithScopes { get { bool useJwtAccessWithScopes = true; MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes); return useJwtAccessWithScopes; } } static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes); /// <summary> /// Asynchronously creates a <see cref="CustomerFeedServiceClient"/> using the default credentials, endpoint and /// settings. To specify custom credentials or other settings, use /// <see cref="CustomerFeedServiceClientBuilder"/>. /// </summary> /// <param name="cancellationToken"> /// The <see cref="st::CancellationToken"/> to use while creating the client. 
/// </param> /// <returns>The task representing the created <see cref="CustomerFeedServiceClient"/>.</returns> public static stt::Task<CustomerFeedServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) => new CustomerFeedServiceClientBuilder().BuildAsync(cancellationToken); /// <summary> /// Synchronously creates a <see cref="CustomerFeedServiceClient"/> using the default credentials, endpoint and /// settings. To specify custom credentials or other settings, use /// <see cref="CustomerFeedServiceClientBuilder"/>. /// </summary> /// <returns>The created <see cref="CustomerFeedServiceClient"/>.</returns> public static CustomerFeedServiceClient Create() => new CustomerFeedServiceClientBuilder().Build(); /// <summary> /// Creates a <see cref="CustomerFeedServiceClient"/> which uses the specified call invoker for remote /// operations. /// </summary> /// <param name="callInvoker"> /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null. /// </param> /// <param name="settings">Optional <see cref="CustomerFeedServiceSettings"/>.</param> /// <returns>The created <see cref="CustomerFeedServiceClient"/>.</returns> internal static CustomerFeedServiceClient Create(grpccore::CallInvoker callInvoker, CustomerFeedServiceSettings settings = null) { gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker)); grpcinter::Interceptor interceptor = settings?.Interceptor; if (interceptor != null) { callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor); } CustomerFeedService.CustomerFeedServiceClient grpcClient = new CustomerFeedService.CustomerFeedServiceClient(callInvoker); return new CustomerFeedServiceClientImpl(grpcClient, settings); } /// <summary> /// Shuts down any channels automatically created by <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not /// affected. /// </summary> /// <remarks> /// After calling this method, further calls to <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down /// by another call to this method. /// </remarks> /// <returns>A task representing the asynchronous shutdown operation.</returns> public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync(); /// <summary>The underlying gRPC CustomerFeedService client</summary> public virtual CustomerFeedService.CustomerFeedServiceClient GrpcClient => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::CustomerFeed GetCustomerFeed(GetCustomerFeedRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested customer feed in full detail. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CustomerFeed> GetCustomerFeedAsync(GetCustomerFeedRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CustomerFeed> GetCustomerFeedAsync(GetCustomerFeedRequest request, st::CancellationToken cancellationToken) => GetCustomerFeedAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the customer feed to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::CustomerFeed GetCustomerFeed(string resourceName, gaxgrpc::CallSettings callSettings = null) => GetCustomerFeed(new GetCustomerFeedRequest { ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the customer feed to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CustomerFeed> GetCustomerFeedAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) => GetCustomerFeedAsync(new GetCustomerFeedRequest { ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the customer feed to fetch. 
/// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CustomerFeed> GetCustomerFeedAsync(string resourceName, st::CancellationToken cancellationToken) => GetCustomerFeedAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the customer feed to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual gagvr::CustomerFeed GetCustomerFeed(gagvr::CustomerFeedName resourceName, gaxgrpc::CallSettings callSettings = null) => GetCustomerFeed(new GetCustomerFeedRequest { ResourceNameAsCustomerFeedName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the customer feed to fetch. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CustomerFeed> GetCustomerFeedAsync(gagvr::CustomerFeedName resourceName, gaxgrpc::CallSettings callSettings = null) => GetCustomerFeedAsync(new GetCustomerFeedRequest { ResourceNameAsCustomerFeedName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)), }, callSettings); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="resourceName"> /// Required. The resource name of the customer feed to fetch. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<gagvr::CustomerFeed> GetCustomerFeedAsync(gagvr::CustomerFeedName resourceName, st::CancellationToken cancellationToken) => GetCustomerFeedAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Creates, updates, or removes customer feeds. Operation statuses are /// returned. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [CollectionSizeError]() /// [CustomerFeedError]() /// [DatabaseError]() /// [DistinctError]() /// [FieldError]() /// [FieldMaskError]() /// [FunctionError]() /// [FunctionParsingError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NotEmptyError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual MutateCustomerFeedsResponse MutateCustomerFeeds(MutateCustomerFeedsRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates, updates, or removes customer feeds. Operation statuses are /// returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [CollectionSizeError]() /// [CustomerFeedError]() /// [DatabaseError]() /// [DistinctError]() /// [FieldError]() /// [FieldMaskError]() /// [FunctionError]() /// [FunctionParsingError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NotEmptyError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(MutateCustomerFeedsRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates, updates, or removes customer feeds. Operation statuses are /// returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [CollectionSizeError]() /// [CustomerFeedError]() /// [DatabaseError]() /// [DistinctError]() /// [FieldError]() /// [FieldMaskError]() /// [FunctionError]() /// [FunctionParsingError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NotEmptyError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(MutateCustomerFeedsRequest request, st::CancellationToken cancellationToken) => MutateCustomerFeedsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Creates, updates, or removes customer feeds. Operation statuses are /// returned. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [CollectionSizeError]() /// [CustomerFeedError]() /// [DatabaseError]() /// [DistinctError]() /// [FieldError]() /// [FieldMaskError]() /// [FunctionError]() /// [FunctionParsingError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NotEmptyError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="customerId"> /// Required. The ID of the customer whose customer feeds are being modified. /// </param> /// <param name="operations"> /// Required. The list of operations to perform on individual customer feeds. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual MutateCustomerFeedsResponse MutateCustomerFeeds(string customerId, scg::IEnumerable<CustomerFeedOperation> operations, gaxgrpc::CallSettings callSettings = null) => MutateCustomerFeeds(new MutateCustomerFeedsRequest { CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), Operations = { gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)), }, }, callSettings); /// <summary> /// Creates, updates, or removes customer feeds. Operation statuses are /// returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [CollectionSizeError]() /// [CustomerFeedError]() /// [DatabaseError]() /// [DistinctError]() /// [FieldError]() /// [FieldMaskError]() /// [FunctionError]() /// [FunctionParsingError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NotEmptyError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="customerId"> /// Required. The ID of the customer whose customer feeds are being modified. /// </param> /// <param name="operations"> /// Required. The list of operations to perform on individual customer feeds. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(string customerId, scg::IEnumerable<CustomerFeedOperation> operations, gaxgrpc::CallSettings callSettings = null) => MutateCustomerFeedsAsync(new MutateCustomerFeedsRequest { CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), Operations = { gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)), }, }, callSettings); /// <summary> /// Creates, updates, or removes customer feeds. Operation statuses are /// returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [CollectionSizeError]() /// [CustomerFeedError]() /// [DatabaseError]() /// [DistinctError]() /// [FieldError]() /// [FieldMaskError]() /// [FunctionError]() /// [FunctionParsingError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NotEmptyError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="customerId"> /// Required. 
The ID of the customer whose customer feeds are being modified. /// </param> /// <param name="operations"> /// Required. The list of operations to perform on individual customer feeds. /// </param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(string customerId, scg::IEnumerable<CustomerFeedOperation> operations, st::CancellationToken cancellationToken) => MutateCustomerFeedsAsync(customerId, operations, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); } /// <summary>CustomerFeedService client wrapper implementation, for convenient use.</summary> /// <remarks> /// Service to manage customer feeds. /// </remarks> public sealed partial class CustomerFeedServiceClientImpl : CustomerFeedServiceClient { private readonly gaxgrpc::ApiCall<GetCustomerFeedRequest, gagvr::CustomerFeed> _callGetCustomerFeed; private readonly gaxgrpc::ApiCall<MutateCustomerFeedsRequest, MutateCustomerFeedsResponse> _callMutateCustomerFeeds; /// <summary> /// Constructs a client wrapper for the CustomerFeedService service, with the specified gRPC client and /// settings. /// </summary> /// <param name="grpcClient">The underlying gRPC client.</param> /// <param name="settings">The base <see cref="CustomerFeedServiceSettings"/> used within this client.</param> public CustomerFeedServiceClientImpl(CustomerFeedService.CustomerFeedServiceClient grpcClient, CustomerFeedServiceSettings settings) { GrpcClient = grpcClient; CustomerFeedServiceSettings effectiveSettings = settings ?? CustomerFeedServiceSettings.GetDefault(); gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings); _callGetCustomerFeed = clientHelper.BuildApiCall<GetCustomerFeedRequest, gagvr::CustomerFeed>(grpcClient.GetCustomerFeedAsync, grpcClient.GetCustomerFeed, effectiveSettings.GetCustomerFeedSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName); Modify_ApiCall(ref _callGetCustomerFeed); Modify_GetCustomerFeedApiCall(ref _callGetCustomerFeed); _callMutateCustomerFeeds = clientHelper.BuildApiCall<MutateCustomerFeedsRequest, MutateCustomerFeedsResponse>(grpcClient.MutateCustomerFeedsAsync, grpcClient.MutateCustomerFeeds, effectiveSettings.MutateCustomerFeedsSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId); Modify_ApiCall(ref _callMutateCustomerFeeds); Modify_MutateCustomerFeedsApiCall(ref _callMutateCustomerFeeds); OnConstruction(grpcClient, effectiveSettings, clientHelper); } partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>; partial void Modify_GetCustomerFeedApiCall(ref gaxgrpc::ApiCall<GetCustomerFeedRequest, gagvr::CustomerFeed> call); partial void Modify_MutateCustomerFeedsApiCall(ref gaxgrpc::ApiCall<MutateCustomerFeedsRequest, MutateCustomerFeedsResponse> call); partial void OnConstruction(CustomerFeedService.CustomerFeedServiceClient grpcClient, CustomerFeedServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper); /// <summary>The underlying gRPC CustomerFeedService client</summary> public override CustomerFeedService.CustomerFeedServiceClient GrpcClient { get; } partial void Modify_GetCustomerFeedRequest(ref GetCustomerFeedRequest request, ref gaxgrpc::CallSettings settings); partial void 
Modify_MutateCustomerFeedsRequest(ref MutateCustomerFeedsRequest request, ref gaxgrpc::CallSettings settings); /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override gagvr::CustomerFeed GetCustomerFeed(GetCustomerFeedRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_GetCustomerFeedRequest(ref request, ref callSettings); return _callGetCustomerFeed.Sync(request, callSettings); } /// <summary> /// Returns the requested customer feed in full detail. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<gagvr::CustomerFeed> GetCustomerFeedAsync(GetCustomerFeedRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_GetCustomerFeedRequest(ref request, ref callSettings); return _callGetCustomerFeed.Async(request, callSettings); } /// <summary> /// Creates, updates, or removes customer feeds. Operation statuses are /// returned. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [CollectionSizeError]() /// [CustomerFeedError]() /// [DatabaseError]() /// [DistinctError]() /// [FieldError]() /// [FieldMaskError]() /// [FunctionError]() /// [FunctionParsingError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NotEmptyError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public override MutateCustomerFeedsResponse MutateCustomerFeeds(MutateCustomerFeedsRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_MutateCustomerFeedsRequest(ref request, ref callSettings); return _callMutateCustomerFeeds.Sync(request, callSettings); } /// <summary> /// Creates, updates, or removes customer feeds. Operation statuses are /// returned. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [CollectionSizeError]() /// [CustomerFeedError]() /// [DatabaseError]() /// [DistinctError]() /// [FieldError]() /// [FieldMaskError]() /// [FunctionError]() /// [FunctionParsingError]() /// [HeaderError]() /// [IdError]() /// [InternalError]() /// [MutateError]() /// [NotEmptyError]() /// [OperatorError]() /// [QuotaError]() /// [RangeError]() /// [RequestError]() /// [SizeLimitError]() /// [StringFormatError]() /// [StringLengthError]() /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public override stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(MutateCustomerFeedsRequest request, gaxgrpc::CallSettings callSettings = null) { Modify_MutateCustomerFeedsRequest(ref request, ref callSettings); return _callMutateCustomerFeeds.Async(request, callSettings); } } }
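// --- Illustrative usage sketch, not generated code. It shows how the convenience
// --- overload documented above (customerId, operations, cancellationToken) might be
// --- called. The standard GAPIC factory CustomerFeedServiceClient.Create(), the
// --- versioned namespace in the using directive, the customer ID, and the feed
// --- resource name are all assumptions/placeholders, not taken from this file.
using System;
using System.Threading;
using System.Threading.Tasks;
using Google.Ads.GoogleAds.V1.Services; // assumed versioned services namespace

public static class CustomerFeedServiceUsageSketch
{
    public static async Task RunAsync()
    {
        // Assumes the generated client exposes the usual GAPIC Create() factory.
        CustomerFeedServiceClient client = CustomerFeedServiceClient.Create();

        // Look up one customer feed by resource name (format shown is illustrative).
        var feed = await client.GetCustomerFeedAsync(new GetCustomerFeedRequest
        {
            ResourceName = "customers/1234567890/customerFeeds/9876543210",
        });
        Console.WriteLine(feed.ResourceName);

        // Remove it via the (customerId, operations, cancellationToken) convenience
        // overload documented earlier in this file.
        MutateCustomerFeedsResponse response = await client.MutateCustomerFeedsAsync(
            "1234567890",
            new[] { new CustomerFeedOperation { Remove = feed.ResourceName } },
            CancellationToken.None);
        Console.WriteLine(response);
    }
}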
namespace Stress_and_Performance_Testing { using System; using System.Data.Entity; using System.ComponentModel.DataAnnotations.Schema; using System.Linq; public partial class Model1 : DbContext { public Model1() : base("name=Model1") { } public virtual DbSet<BUSINESSCATEGORY> BUSINESSCATEGORY { get; set; } public virtual DbSet<BUSINESSORDINAL> BUSINESSORDINAL { get; set; } public virtual DbSet<BUSSINESS> BUSSINESS { get; set; } public virtual DbSet<CARINFOR> CARINFOR { get; set; } public virtual DbSet<CATEGORIES> CATEGORIES { get; set; } public virtual DbSet<CONFIG> CONFIG { get; set; } public virtual DbSet<CORPORATEINFO> CORPORATEINFO { get; set; } public virtual DbSet<COUNTY> COUNTY { get; set; } public virtual DbSet<fushanbusiness> fushanbusiness { get; set; } public virtual DbSet<haiyangbusiness> haiyangbusiness { get; set; } public virtual DbSet<POPULATION> POPULATION { get; set; } public virtual DbSet<USERS> USERS { get; set; } protected override void OnModelCreating(DbModelBuilder modelBuilder) { modelBuilder.Entity<BUSINESSCATEGORY>() .Property(e => e.BUSINESSCODE) .IsUnicode(false); modelBuilder.Entity<BUSINESSCATEGORY>() .Property(e => e.BUSINESSNAME) .IsUnicode(false); modelBuilder.Entity<BUSINESSCATEGORY>() .Property(e => e.CATEGORY) .IsUnicode(false); modelBuilder.Entity<BUSINESSCATEGORY>() .Property(e => e.SERVICEAPI) .IsUnicode(false); modelBuilder.Entity<BUSINESSORDINAL>() .Property(e => e.BUSINESSDATE) .IsUnicode(false); modelBuilder.Entity<BUSINESSORDINAL>() .Property(e => e.CATEGORY) .IsUnicode(false); modelBuilder.Entity<BUSINESSORDINAL>() .Property(e => e.ORDINAL) .HasPrecision(38, 0); modelBuilder.Entity<BUSINESSORDINAL>() .Property(e => e.COUNTYCODE) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.ID) .HasPrecision(38, 0); modelBuilder.Entity<BUSSINESS>() .Property(e => e.TYPE) .HasPrecision(38, 0); modelBuilder.Entity<BUSSINESS>() .Property(e => e.START_TIME) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.END_TIME) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.STATUS) .HasPrecision(38, 0); modelBuilder.Entity<BUSSINESS>() .Property(e => e.SERIAL_NUM) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.REJECT_REASON) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.NAME) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.PHONE_NUM) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.PROCESS_USER) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.FILE_RECV_USER) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.TRANSFER_STATUS) .HasPrecision(38, 0); modelBuilder.Entity<BUSSINESS>() .Property(e => e.UPLOADER) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.COMPLETE_PAY_USER) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.ATTENTION) .IsUnicode(false); modelBuilder.Entity<BUSSINESS>() .Property(e => e.COUNTYCODE) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.ID) .HasPrecision(38, 0); modelBuilder.Entity<CARINFOR>() .Property(e => e.CAR_NUM) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.BRAND) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.MODEL_TYPE) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.VIN) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.PLATE_TYPE) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.OWNER) .IsUnicode(false); 
modelBuilder.Entity<CARINFOR>() .Property(e => e.OWNER_ID) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.CAR_LENGTH) .HasPrecision(3, 3); modelBuilder.Entity<CARINFOR>() .Property(e => e.CAR_WIDTH) .HasPrecision(3, 3); modelBuilder.Entity<CARINFOR>() .Property(e => e.CAR_HEIGHT) .HasPrecision(3, 3); modelBuilder.Entity<CARINFOR>() .Property(e => e.STANDARD_LENGTH) .HasPrecision(3, 3); modelBuilder.Entity<CARINFOR>() .Property(e => e.STANDARD_WIDTH) .HasPrecision(3, 3); modelBuilder.Entity<CARINFOR>() .Property(e => e.STANDARD_HEIGHT) .HasPrecision(3, 3); modelBuilder.Entity<CARINFOR>() .Property(e => e.QUEUE_NUM) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.SERIAL_NUM) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.FINISH) .HasPrecision(38, 0); modelBuilder.Entity<CARINFOR>() .Property(e => e.TASK_TYPE) .HasPrecision(38, 0); modelBuilder.Entity<CARINFOR>() .Property(e => e.INSPECTOR) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.RECHECKER) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.UNLOAD_TASK_NUM) .IsUnicode(false); modelBuilder.Entity<CARINFOR>() .Property(e => e.INVALID_TASK) .HasPrecision(38, 0); modelBuilder.Entity<CATEGORIES>() .Property(e => e.CATEGORY) .IsUnicode(false); modelBuilder.Entity<CATEGORIES>() .Property(e => e.NAME) .IsUnicode(false); modelBuilder.Entity<CONFIG>() .Property(e => e.COUNTYCODE) .IsUnicode(false); modelBuilder.Entity<CONFIG>() .Property(e => e.BUSINESSTABLENAME) .IsUnicode(false); modelBuilder.Entity<CORPORATEINFO>() .Property(e => e.CODE) .IsUnicode(false); modelBuilder.Entity<CORPORATEINFO>() .Property(e => e.NAME) .IsUnicode(false); modelBuilder.Entity<CORPORATEINFO>() .Property(e => e.ADDRESS) .IsUnicode(false); modelBuilder.Entity<COUNTY>() .Property(e => e.COUNTYCODE) .IsUnicode(false); modelBuilder.Entity<COUNTY>() .Property(e => e.NAME) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.ID) .HasPrecision(38, 0); modelBuilder.Entity<fushanbusiness>() .Property(e => e.TYPE) .HasPrecision(38, 0); modelBuilder.Entity<fushanbusiness>() .Property(e => e.START_TIME) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.END_TIME) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.STATUS) .HasPrecision(38, 0); modelBuilder.Entity<fushanbusiness>() .Property(e => e.SERIAL_NUM) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.REJECT_REASON) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.NAME) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.PHONE_NUM) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.PROCESS_USER) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.FILE_RECV_USER) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.TRANSFER_STATUS) .HasPrecision(38, 0); modelBuilder.Entity<fushanbusiness>() .Property(e => e.UPLOADER) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.COMPLETE_PAY_USER) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.ATTENTION) .IsUnicode(false); modelBuilder.Entity<fushanbusiness>() .Property(e => e.COUNTYCODE) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.ID) .HasPrecision(38, 0); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.TYPE) .HasPrecision(38, 0); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.START_TIME) 
.IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.END_TIME) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.STATUS) .HasPrecision(38, 0); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.SERIAL_NUM) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.REJECT_REASON) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.NAME) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.PHONE_NUM) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.PROCESS_USER) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.FILE_RECV_USER) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.TRANSFER_STATUS) .HasPrecision(38, 0); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.UPLOADER) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.COMPLETE_PAY_USER) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.ATTENTION) .IsUnicode(false); modelBuilder.Entity<haiyangbusiness>() .Property(e => e.COUNTYCODE) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.NAME) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.SEX) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.NATION) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.BORN) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.ADDRESS) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.POSTCODE) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.POSTADDRESS) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.MOBILE) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.TELEPHONE) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.EMAIL) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.IDNUM) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.FIRSTFINGER) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.SECONDFINGER) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.LEFTEYE) .IsUnicode(false); modelBuilder.Entity<POPULATION>() .Property(e => e.RIGHTEYE) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.ID) .HasPrecision(38, 0); modelBuilder.Entity<USERS>() .Property(e => e.USERNAME) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.PASSWORD) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.LIMIT) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.DEPARTMENT) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.POST) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.POLICENUM) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.REALNAME) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.PDA_TYPE) .HasPrecision(38, 0); modelBuilder.Entity<USERS>() .Property(e => e.FIRSTFINGER) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.SECONDFINGER) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.COUNTYCODE) .IsUnicode(false); modelBuilder.Entity<USERS>() .Property(e => e.AUTHORITYLEVEL) .IsUnicode(false); } } }
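// --- Illustrative usage sketch, not part of the generated model. It shows a minimal
// --- LINQ-to-Entities query against the Model1 context configured above, assuming a
// --- connection string named "Model1" exists in App.config/Web.config. The county
// --- code value below is a placeholder.
using System;
using System.Linq;

namespace Stress_and_Performance_Testing
{
    public static class Model1UsageSketch
    {
        public static void Run()
        {
            using (var db = new Model1())
            {
                // Count rows in one of the mapped sets, filtered on a string column
                // that OnModelCreating configures as non-Unicode.
                var countyBusinessCount = db.BUSSINESS
                    .Count(b => b.COUNTYCODE == "370600"); // placeholder county code

                Console.WriteLine($"BUSSINESS rows for county: {countyBusinessCount}");

                // Queries compose the same way regardless of the column mappings above.
                var userNames = db.USERS
                    .OrderBy(u => u.USERNAME)
                    .Select(u => u.USERNAME)
                    .Take(10)
                    .ToList();

                userNames.ForEach(Console.WriteLine);
            }
        }
    }
}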
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using Xunit; namespace System.IO.Tests { public class FileStream_WriteAsync : FileSystemTest { [Fact] public void NullBufferThrows() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { Assert.Throws<ArgumentNullException>("buffer", () => FSAssert.CompletesSynchronously(fs.WriteAsync(null, 0, 1))); } } [Fact] public void NegativeOffsetThrows() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { Assert.Throws<ArgumentOutOfRangeException>("offset", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], -1, 1))); // buffer is checked first Assert.Throws<ArgumentNullException>("buffer", () => FSAssert.CompletesSynchronously(fs.WriteAsync(null, -1, 1))); } } [Fact] public void NegativeCountThrows() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { Assert.Throws<ArgumentOutOfRangeException>("count", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 0, -1))); // offset is checked before count Assert.Throws<ArgumentOutOfRangeException>("offset", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], -1, -1))); // buffer is checked first Assert.Throws<ArgumentNullException>("buffer", () => FSAssert.CompletesSynchronously(fs.WriteAsync(null, -1, -1))); } } [Fact] public void BufferOutOfBoundsThrows() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { // offset out of bounds Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 1, 1))); // offset out of bounds for 0 count WriteAsync Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 2, 0))); // offset out of bounds even for 0 length buffer Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[0], 1, 0))); // combination offset and count out of bounds Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[2], 1, 2))); // edges Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[0], int.MaxValue, 0))); Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[0], int.MaxValue, int.MaxValue))); } } [Fact] public void WriteAsyncDisposedThrows() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { fs.Dispose(); Assert.Throws<ObjectDisposedException>(() => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 0, 1))); // even for noop WriteAsync Assert.Throws<ObjectDisposedException>(() => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 0, 0))); // out of bounds checking happens first Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[2], 1, 2))); // count is checked prior Assert.Throws<ArgumentOutOfRangeException>("count", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 0, -1))); // offset is checked prior Assert.Throws<ArgumentOutOfRangeException>("offset", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], -1, -1))); // buffer is checked first Assert.Throws<ArgumentNullException>("buffer", 
() => FSAssert.CompletesSynchronously(fs.WriteAsync(null, -1, -1))); } } [Fact] public void ReadOnlyThrows() { string fileName = GetTestFilePath(); using (FileStream fs = new FileStream(fileName, FileMode.Create)) { } using (FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read)) { Assert.Throws<NotSupportedException>(() => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 0, 1))); fs.Dispose(); // Disposed checking happens first Assert.Throws<ObjectDisposedException>(() => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 0, 1))); // out of bounds checking happens first Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[2], 1, 2))); // count is checked prior Assert.Throws<ArgumentOutOfRangeException>("count", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 0, -1))); // offset is checked prior Assert.Throws<ArgumentOutOfRangeException>("offset", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], -1, -1))); // buffer is checked first Assert.Throws<ArgumentNullException>("buffer", () => FSAssert.CompletesSynchronously(fs.WriteAsync(null, -1, -1))); } } [Fact] public void CancelledTokenFastPath() { CancellationTokenSource cts = new CancellationTokenSource(); cts.Cancel(); CancellationToken cancelledToken = cts.Token; string fileName = GetTestFilePath(); using (FileStream fs = new FileStream(fileName, FileMode.Create)) { FSAssert.IsCancelled(fs.WriteAsync(new byte[1], 0, 1, cancelledToken), cancelledToken); } using (FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read)) { // before read only check FSAssert.IsCancelled(fs.WriteAsync(new byte[1], 0, 1, cancelledToken), cancelledToken); fs.Dispose(); // before disposed check FSAssert.IsCancelled(fs.WriteAsync(new byte[1], 0, 1, cancelledToken), cancelledToken); // out of bounds checking happens first Assert.Throws<ArgumentException>(null, () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[2], 1, 2, cancelledToken))); // count is checked prior Assert.Throws<ArgumentOutOfRangeException>("count", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], 0, -1, cancelledToken))); // offset is checked prior Assert.Throws<ArgumentOutOfRangeException>("offset", () => FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[1], -1, -1, cancelledToken))); // buffer is checked first Assert.Throws<ArgumentNullException>("buffer", () => FSAssert.CompletesSynchronously(fs.WriteAsync(null, -1, -1, cancelledToken))); } } [Fact] public async Task NoopWriteAsyncsSucceed() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { // note that these do not succeed synchronously even though they do nothing. 
await fs.WriteAsync(new byte[0], 0, 0); await fs.WriteAsync(new byte[1], 0, 0); // even though offset is out of bounds of buffer, this is still allowed // for the last element await fs.WriteAsync(new byte[1], 1, 0); await fs.WriteAsync(new byte[2], 1, 0); Assert.Equal(0, fs.Length); Assert.Equal(0, fs.Position); } } [Fact] public void WriteAsyncBufferedCompletesSynchronously() { using (FileStream fs = new FileStream( GetTestFilePath(), FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite | FileShare.Delete, TestBuffer.Length * 2, useAsync: true)) { FSAssert.CompletesSynchronously(fs.WriteAsync(new byte[TestBuffer.Length], 0, TestBuffer.Length)); } } [Fact] public async Task SimpleWriteAsync() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { await fs.WriteAsync(TestBuffer, 0, TestBuffer.Length); Assert.Equal(TestBuffer.Length, fs.Length); Assert.Equal(TestBuffer.Length, fs.Position); fs.Position = 0; byte[] buffer = new byte[TestBuffer.Length]; Assert.Equal(TestBuffer.Length, await fs.ReadAsync(buffer, 0, buffer.Length)); Assert.Equal(TestBuffer, buffer); } } [Fact] public async Task WriteAsyncCancelledFile() { const int writeSize = 1024 * 1024; using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { byte[] buffer = new byte[writeSize]; CancellationTokenSource cts = new CancellationTokenSource(); Task writeTask = fs.WriteAsync(buffer, 0, buffer.Length, cts.Token); cts.Cancel(); try { await writeTask; } catch (OperationCanceledException oce) { // Ideally we'd be doing an Assert.Throws<OperationCanceledException> // but since cancellation is a race condition we accept either outcome Assert.Equal(cts.Token, oce.CancellationToken); } } } [Fact] public async void WriteAsyncInternalBufferOverflow() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create, FileAccess.Write, FileShare.None, 3, useAsync: true)) { // Fill buffer; should trigger flush of full buffer, no additional I/O await fs.WriteAsync(TestBuffer, 0, 3); Assert.True(fs.Length == 3); // Add to next buffer await fs.WriteAsync(TestBuffer, 0, 1); Assert.True(fs.Length == 4); // Complete that buffer; should trigger flush of full buffer, no additional I/O await fs.WriteAsync(TestBuffer, 0, 2); Assert.True(fs.Length == 6); // Add to next buffer await fs.WriteAsync(TestBuffer, 0, 2); Assert.True(fs.Length == 8); // Overflow buffer with amount that could fit in a buffer; should trigger a flush, with additional I/O await fs.WriteAsync(TestBuffer, 0, 2); Assert.True(fs.Length == 10); // Overflow buffer with amount that couldn't fit in a buffer; shouldn't be anything to flush, just an additional I/O await fs.WriteAsync(TestBuffer, 0, 4); Assert.True(fs.Length == 14); } } public static IEnumerable<object[]> MemberData_FileStreamAsyncWriting() { foreach (bool useAsync in new[] { true, false }) { if (useAsync && !RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { // We don't have a special async I/O implementation in FileStream on Unix. 
continue; } foreach (bool preSize in new[] { true, false }) { foreach (bool cancelable in new[] { true, false }) { yield return new object[] { useAsync, preSize, false, cancelable, 0x1000, 0x100, 100 }; yield return new object[] { useAsync, preSize, false, cancelable, 0x1, 0x1, 1000 }; yield return new object[] { useAsync, preSize, true, cancelable, 0x2, 0x100, 100 }; yield return new object[] { useAsync, preSize, false, cancelable, 0x4000, 0x10, 100 }; yield return new object[] { useAsync, preSize, true, cancelable, 0x1000, 99999, 10 }; } } } } [Fact] public Task ManyConcurrentWriteAsyncs() { // For inner loop, just test one case return ManyConcurrentWriteAsyncs( useAsync: RuntimeInformation.IsOSPlatform(OSPlatform.Windows), presize: false, exposeHandle: false, cancelable: true, bufferSize: 4096, writeSize: 1024, numWrites: 10); } [Theory] [MemberData(nameof(MemberData_FileStreamAsyncWriting))] [OuterLoop] // many combinations: we test just one in inner loop and the rest outer public async Task ManyConcurrentWriteAsyncs( bool useAsync, bool presize, bool exposeHandle, bool cancelable, int bufferSize, int writeSize, int numWrites) { long totalLength = writeSize * numWrites; var expectedData = new byte[totalLength]; new Random(42).NextBytes(expectedData); CancellationToken cancellationToken = cancelable ? new CancellationTokenSource().Token : CancellationToken.None; string path = GetTestFilePath(); using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.ReadWrite, FileShare.None, bufferSize, useAsync)) { if (presize) { fs.SetLength(totalLength); } if (exposeHandle) { var ignored = fs.SafeFileHandle; } Task[] writes = new Task[numWrites]; for (int i = 0; i < numWrites; i++) { writes[i] = fs.WriteAsync(expectedData, i * writeSize, writeSize, cancellationToken); Assert.Null(writes[i].Exception); if (useAsync) { Assert.Equal((i + 1) * writeSize, fs.Position); } } await Task.WhenAll(writes); } byte[] actualData = File.ReadAllBytes(path); Assert.Equal(expectedData.Length, actualData.Length); Assert.Equal<byte>(expectedData, actualData); } [Fact] public Task CopyToAsyncBetweenFileStreams() { // For inner loop, just test one case return CopyToAsyncBetweenFileStreams( useAsync: RuntimeInformation.IsOSPlatform(OSPlatform.Windows), preSize: false, exposeHandle: false, cancelable: true, bufferSize: 4096, writeSize: 1024, numWrites: 10); } [Fact] public void CopyToAsync_InvalidArgs_Throws() { using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create)) { Assert.Throws<ArgumentNullException>("destination", () => { fs.CopyToAsync(null); }); Assert.Throws<ArgumentOutOfRangeException>("bufferSize", () => { fs.CopyToAsync(new MemoryStream(), 0); }); Assert.Throws<NotSupportedException>(() => { fs.CopyToAsync(new MemoryStream(new byte[1], writable: false)); }); fs.Dispose(); Assert.Throws<ObjectDisposedException>(() => { fs.CopyToAsync(new MemoryStream()); }); } using (FileStream fs = new FileStream(GetTestFilePath(), FileMode.Create, FileAccess.Write)) { Assert.Throws<NotSupportedException>(() => { fs.CopyToAsync(new MemoryStream()); }); } } [Theory] [MemberData(nameof(MemberData_FileStreamAsyncWriting))] [OuterLoop] // many combinations: we test just one in inner loop and the rest outer public async Task CopyToAsyncBetweenFileStreams( bool useAsync, bool preSize, bool exposeHandle, bool cancelable, int bufferSize, int writeSize, int numWrites) { long totalLength = writeSize * numWrites; var expectedData = new byte[totalLength]; new Random(42).NextBytes(expectedData); string 
srcPath = GetTestFilePath(); File.WriteAllBytes(srcPath, expectedData); string dstPath = GetTestFilePath(); using (FileStream src = new FileStream(srcPath, FileMode.Open, FileAccess.Read, FileShare.None, bufferSize, useAsync)) using (FileStream dst = new FileStream(dstPath, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize, useAsync)) { await src.CopyToAsync(dst, writeSize, cancelable ? new CancellationTokenSource().Token : CancellationToken.None); } byte[] actualData = File.ReadAllBytes(dstPath); Assert.Equal(expectedData.Length, actualData.Length); Assert.Equal<byte>(expectedData, actualData); } [Theory] [InlineData(true)] [InlineData(false)] public async Task BufferCorrectlyMaintainedWhenReadAndWrite(bool useAsync) { string path = GetTestFilePath(); File.WriteAllBytes(path, TestBuffer); using (FileStream fs = new FileStream(path, FileMode.Open, FileAccess.ReadWrite, FileShare.None, 2, useAsync)) { Assert.Equal(TestBuffer[0], await ReadByteAsync(fs)); Assert.Equal(TestBuffer[1], await ReadByteAsync(fs)); Assert.Equal(TestBuffer[2], await ReadByteAsync(fs)); await fs.WriteAsync(TestBuffer, 0, TestBuffer.Length); fs.Position = 0; Assert.Equal(TestBuffer[0], await ReadByteAsync(fs)); Assert.Equal(TestBuffer[1], await ReadByteAsync(fs)); Assert.Equal(TestBuffer[2], await ReadByteAsync(fs)); for (int i = 0; i < TestBuffer.Length; i++) { Assert.Equal(TestBuffer[i], await ReadByteAsync(fs)); } } } private static async Task<byte> ReadByteAsync(FileStream fs) { byte[] oneByte = new byte[1]; Assert.Equal(1, await fs.ReadAsync(oneByte, 0, 1)); return oneByte[0]; } [Fact, OuterLoop] public async Task WriteAsyncMiniStress() { TimeSpan testRunTime = TimeSpan.FromSeconds(30); const int MaximumWriteSize = 16 * 1024; const int NormalWriteSize = 4 * 1024; Random rand = new Random(); DateTime testStartTime = DateTime.UtcNow; // Generate data to write (NOTE: Randomizing this is important as some file systems may optimize writing 0s) byte[] dataToWrite = new byte[MaximumWriteSize]; rand.NextBytes(dataToWrite); string writeFileName = GetTestFilePath(); do { // Create a new token that expires between 100-1000ms CancellationTokenSource tokenSource = new CancellationTokenSource(); tokenSource.CancelAfter(rand.Next(100, 1000)); using (var stream = new FileStream(writeFileName, FileMode.Create, FileAccess.Write)) { do { try { // 20%: random write size int bytesToWrite = (rand.NextDouble() < 0.2 ? rand.Next(16, MaximumWriteSize) : NormalWriteSize); if (rand.NextDouble() < 0.1) { // 10%: Sync write stream.Write(dataToWrite, 0, bytesToWrite); } else { // 90%: Async write await stream.WriteAsync(dataToWrite, 0, bytesToWrite, tokenSource.Token); } } catch (TaskCanceledException) { Assert.True(tokenSource.Token.IsCancellationRequested, "Received cancellation exception before token expired"); } } while (!tokenSource.Token.IsCancellationRequested); } } while (DateTime.UtcNow - testStartTime <= testRunTime); } } }
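// --- Illustrative sketch, independent of the FSAssert test helpers used above. It
// --- demonstrates, with plain framework APIs, the two behaviors these tests pin
// --- down: an already-cancelled token short-circuits WriteAsync before any I/O,
// --- and a normal WriteAsync advances Length/Position. The temp-file path is
// --- throwaway, not a path from the tests.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

public static class WriteAsyncSketch
{
    public static async Task RunAsync()
    {
        string path = Path.GetTempFileName();
        byte[] payload = { 1, 2, 3, 4 };

        var cts = new CancellationTokenSource();
        cts.Cancel();

        using (var fs = new FileStream(path, FileMode.Create, FileAccess.Write,
                                       FileShare.None, bufferSize: 4096, useAsync: true))
        {
            // Cancelled-token fast path: the returned task is already cancelled.
            Task cancelled = fs.WriteAsync(payload, 0, payload.Length, cts.Token);
            Console.WriteLine(cancelled.IsCanceled); // True

            // A normal write completes and the stream length reflects it.
            await fs.WriteAsync(payload, 0, payload.Length);
            Console.WriteLine(fs.Length == payload.Length); // True
        }

        File.Delete(path);
    }
}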
//------------------------------------------------------------------------------ // <copyright file="ShapeGenerator.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> // <owner current="true" primary="true">derekdb</owner> //------------------------------------------------------------------------------ #if ENABLEDATABINDING using System; using System.Xml; using System.Xml.Schema; using System.Xml.XPath; using System.Collections; using System.Diagnostics; using System.ComponentModel; using System.Text; namespace System.Xml.XPath.DataBinding { internal sealed class ShapeGenerator { private Hashtable elementTypesProcessed; private IXmlNamespaceResolver nsResolver; public ShapeGenerator(IXmlNamespaceResolver nsResolver) { this.elementTypesProcessed = new Hashtable(); this.nsResolver = nsResolver; } public Shape GenerateFromSchema(XmlSchemaElement xse) { XmlQualifiedName xseName = xse.QualifiedName; XmlSchemaType schemaType = xse.ElementSchemaType; XmlSchemaComplexType complexType = schemaType as XmlSchemaComplexType; if (null != complexType) { XmlSchemaParticle particle = null; Shape rootShape = null; XmlSchemaContentType contentType = complexType.ElementDecl.ContentValidator.ContentType; switch (contentType) { case XmlSchemaContentType.Mixed: case XmlSchemaContentType.TextOnly: rootShape = new Shape(GenName(xseName) + "_Text", BindingType.Text); rootShape.AddParticle(xse); break; case XmlSchemaContentType.Empty: rootShape = new Shape(null, BindingType.Sequence); break; case XmlSchemaContentType.ElementOnly: particle = complexType.ContentTypeParticle; rootShape = ProcessParticle(particle, null); break; } Debug.Assert(rootShape != null); if (complexType.AttributeUses.Values.Count > 0) { if (rootShape.BindingType != BindingType.Sequence) { Shape s = new Shape(null, BindingType.Sequence); s.AddSubShape(rootShape); rootShape = s; } int pos = 0; string[] names = rootShape.SubShapeNames(); ICollection attributes = complexType.AttributeUses.Values; XmlSchemaAttribute[] xsaArray = new XmlSchemaAttribute[attributes.Count]; attributes.CopyTo(xsaArray, 0); Array.Sort(xsaArray, new XmlSchemaAttributeComparer()); foreach(XmlSchemaAttribute xsa in xsaArray) { string name = GenAttrName(xsa.QualifiedName, names); Shape attrShape = new Shape(name, BindingType.Attribute); attrShape.AddParticle(xsa); rootShape.AddAttrShapeAt(attrShape, pos++); } } if (rootShape.BindingType != BindingType.Text) { rootShape.Name = GenName(xseName); rootShape.ContainerDecl = xse; } return rootShape; } else { // simple type Shape s = new Shape(GenName(xseName), BindingType.Text); s.AddParticle(xse); return s; } } public Shape GenerateFromSchema(XmlSchemaAttribute xsa) { Shape s = new Shape(GenName(xsa.QualifiedName), BindingType.Attribute); s.AddParticle(xsa); return s; } Shape ProcessParticle(XmlSchemaParticle xsp, Shape parent) { Shape s; if (xsp == XmlSchemaParticle.Empty) { return null; } if (xsp is XmlSchemaElement) { s = ProcessParticleElement((XmlSchemaElement)xsp); } else if (xsp is XmlSchemaSequence) { s = ProcessParticleGroup((XmlSchemaSequence)xsp, BindingType.Sequence); } else if (xsp is XmlSchemaChoice) { s = ProcessParticleGroup((XmlSchemaChoice)xsp, BindingType.Choice); } else if (xsp is XmlSchemaAll) { s = ProcessParticleGroup((XmlSchemaAll)xsp, BindingType.All); } else { //XmlSchemaAny return null; //Ignore Any in the content model } if (xsp.MaxOccurs > 1) { Shape rep = new Shape(s.Name, BindingType.Repeat); rep.AddSubShape(s); s = rep; } if (parent != null) 
parent.AddSubShape(s); return s; } Shape ProcessParticleElement(XmlSchemaElement xse) { // watch out for recursive schema Shape s = (Shape)this.elementTypesProcessed[xse]; if (null != s) return s; bool complex = xse.ElementSchemaType is XmlSchemaComplexType; s = new Shape(GenName(xse.QualifiedName), complex ? BindingType.ElementNested : BindingType.Element); s.AddParticle(xse); if (complex) { this.elementTypesProcessed.Add(xse, s); s.NestedShape = GenerateFromSchema(xse); this.elementTypesProcessed.Remove(xse); } return s; } Shape ProcessParticleGroup(XmlSchemaGroupBase xsg, BindingType bt) { Shape s = new Shape(null, bt); StringBuilder sb = new StringBuilder(); foreach (XmlSchemaParticle xsp in xsg.Items) { Shape sub = ProcessParticle(xsp, s); if (sub != null) { //sub can be null if the child particle is xs:any if (sb.Length > 0) sb.Append('_'); sb.Append(sub.Name); } } // need to also test if paretn != null for this to work //if (s.IsGroup && s.SubShapes.Count == 1) { // Shape sub = (Shape)s.SubShapes[0]; // s.Clear(); // return sub; //} s.Name = sb.ToString(); return s; } string GenName(XmlQualifiedName xqn) { string ns = xqn.Namespace; string ln = xqn.Name; if (ns.Length != 0) { string prefix = (null==this.nsResolver) ? null : this.nsResolver.LookupPrefix(ns); if (prefix != null && prefix.Length != 0) return String.Concat(prefix, ":", ln); } return ln; } string GenAttrName(XmlQualifiedName xqn, string[] names) { string name = GenName(xqn); if (null != names) { for (int i=0; i<names.Length; i++) { if (name == names[i]) { return String.Concat("@", name); } } } return name; } public void ResetState() { this.elementTypesProcessed.Clear(); } class XmlSchemaAttributeComparer : IComparer { public virtual int Compare(object a, object b) { XmlSchemaAttribute xsaA = (XmlSchemaAttribute)a; XmlSchemaAttribute xsaB = (XmlSchemaAttribute)b; return XmlQualifiedName.Compare(xsaA.QualifiedName, xsaB.QualifiedName); } } } } #endif
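// --- Illustrative sketch, not part of the ShapeGenerator sources above. It mirrors
// --- the GenName naming rule (prefer "prefix:localName" when the resolver knows a
// --- non-empty prefix for the namespace, otherwise just the local name) using the
// --- public XmlNamespaceManager as the IXmlNamespaceResolver. The namespace URI and
// --- prefix are placeholders.
using System;
using System.Xml;

public static class ShapeNameSketch
{
    public static string GenName(XmlQualifiedName xqn, IXmlNamespaceResolver resolver)
    {
        if (xqn.Namespace.Length != 0)
        {
            string prefix = resolver?.LookupPrefix(xqn.Namespace);
            if (!string.IsNullOrEmpty(prefix))
                return prefix + ":" + xqn.Name;
        }
        return xqn.Name;
    }

    public static void Run()
    {
        var nsmgr = new XmlNamespaceManager(new NameTable());
        nsmgr.AddNamespace("po", "urn:example:purchase-order"); // placeholder mapping

        // Qualified name in a mapped namespace -> prefixed shape name.
        Console.WriteLine(GenName(new XmlQualifiedName("item", "urn:example:purchase-order"), nsmgr)); // po:item

        // Name with no namespace -> local name only.
        Console.WriteLine(GenName(new XmlQualifiedName("note", ""), nsmgr)); // note
    }
}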
using Orleans; using System; using System.Threading.Tasks; using TestExtensions; using UnitTests.GrainInterfaces; using Xunit; using Xunit.Abstractions; namespace UnitTests.General { /// <summary> /// Tests that exceptions are correctly propagated. /// </summary> public class ExceptionPropagationTests : OrleansTestingBase, IClassFixture<ExceptionPropagationTests.Fixture> { private const int TestIterations = 100; private readonly ITestOutputHelper output; private readonly Fixture fixture; private readonly IMessageSerializationGrain exceptionGrain; private readonly MessageSerializationClientObject clientObject = new MessageSerializationClientObject(); private readonly IMessageSerializationClientObject clientObjectRef; public ExceptionPropagationTests(ITestOutputHelper output, Fixture fixture) { this.output = output; this.fixture = fixture; var grainFactory = (IInternalGrainFactory)this.fixture.GrainFactory; this.exceptionGrain = grainFactory.GetGrain<IMessageSerializationGrain>(GetRandomGrainId()); this.clientObjectRef = grainFactory.CreateObjectReference<IMessageSerializationClientObject>(this.clientObject); } public class Fixture : BaseTestClusterFixture { } [Fact, TestCategory("BVT")] public async Task ExceptionsPropagatedFromGrainToClient() { var grain = this.fixture.Client.GetGrain<IExceptionGrain>(0); var invalidOperationException = await Assert.ThrowsAsync<InvalidOperationException>(() => grain.ThrowsInvalidOperationException()); Assert.Equal("Test exception", invalidOperationException.Message); var nullReferenceException = await Assert.ThrowsAsync<NullReferenceException>(() => grain.ThrowsNullReferenceException()); Assert.Equal("null null null", nullReferenceException.Message); } [Fact, TestCategory("BVT")] public async Task BasicExceptionPropagation() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); var exception = await Assert.ThrowsAsync<InvalidOperationException>( () => grain.ThrowsInvalidOperationException()); output.WriteLine(exception.ToString()); Assert.Equal("Test exception", exception.Message); } [Fact, TestCategory("BVT")] public void ExceptionContainsOriginalStackTrace() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); // Explicitly using .Wait() instead of await the task to avoid any modification of the inner exception var aggEx = Assert.Throws<AggregateException>( () => grain.ThrowsInvalidOperationException().Wait()); var exception = aggEx.InnerException; output.WriteLine(exception.ToString()); Assert.IsAssignableFrom<InvalidOperationException>(exception); Assert.Equal("Test exception", exception.Message); Assert.Contains("ThrowsInvalidOperationException", exception.StackTrace); } [Fact, TestCategory("BVT")] public async Task ExceptionContainsOriginalStackTraceWhenRethrowingLocally() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); try { // Use await to force the exception to be rethrown and validate that the remote stack trace is still present await grain.ThrowsInvalidOperationException(); Assert.True(false, "should have thrown"); } catch (InvalidOperationException exception) { output.WriteLine(exception.ToString()); Assert.IsAssignableFrom<InvalidOperationException>(exception); Assert.Equal("Test exception", exception.Message); Assert.Contains("ThrowsInvalidOperationException", exception.StackTrace); } } [Fact, TestCategory("BVT")] public async Task ExceptionPropagationDoesNotUnwrapAggregateExceptions() { IExceptionGrain 
grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); var exception = await Assert.ThrowsAsync<AggregateException>( () => grain.ThrowsAggregateExceptionWrappingInvalidOperationException()); var nestedEx = Assert.IsAssignableFrom<InvalidOperationException>(exception.InnerException); Assert.Equal("Test exception", nestedEx.Message); } [Fact, TestCategory("BVT")] public async Task ExceptionPropagationDoesNoFlattenAggregateExceptions() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); var exception = await Assert.ThrowsAsync<AggregateException>( () => grain.ThrowsNestedAggregateExceptionsWrappingInvalidOperationException()); var nestedAggEx = Assert.IsAssignableFrom<AggregateException>(exception.InnerException); var doubleNestedEx = Assert.IsAssignableFrom<InvalidOperationException>(nestedAggEx.InnerException); Assert.Equal("Test exception", doubleNestedEx.Message); } [Fact, TestCategory("BVT")] public async Task TaskCancelationPropagation() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); await Assert.ThrowsAsync<TaskCanceledException>( () => grain.Canceled()); } [Fact, TestCategory("BVT")] public async Task GrainForwardingExceptionPropagation() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); var otherGrainId = GetRandomGrainId(); var exception = await Assert.ThrowsAsync<InvalidOperationException>( () => grain.GrainCallToThrowsInvalidOperationException(otherGrainId)); Assert.Equal("Test exception", exception.Message); } [Fact, TestCategory("BVT")] public async Task GrainForwardingExceptionPropagationDoesNotUnwrapAggregateExceptions() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); var otherGrainId = GetRandomGrainId(); var exception = await Assert.ThrowsAsync<AggregateException>( () => grain.GrainCallToThrowsAggregateExceptionWrappingInvalidOperationException(otherGrainId)); var nestedEx = Assert.IsAssignableFrom<InvalidOperationException>(exception.InnerException); Assert.Equal("Test exception", nestedEx.Message); } [Fact, TestCategory("BVT")] public async Task SynchronousExceptionThrownShouldResultInFaultedTask() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); // start the grain call but don't await it nor wrap in try/catch, to make sure it doesn't throw synchronously var grainCallTask = grain.ThrowsSynchronousInvalidOperationException(); var exception = await Assert.ThrowsAsync<InvalidOperationException>(() => grainCallTask); Assert.Equal("Test exception", exception.Message); var grainCallTask2 = grain.ThrowsSynchronousInvalidOperationException(); var exception2 = await Assert.ThrowsAsync<InvalidOperationException>(() => grainCallTask2); Assert.Equal("Test exception", exception2.Message); } [Fact(Skip = "Implementation of issue #1378 is still pending"), TestCategory("BVT")] public void ExceptionPropagationForwardsEntireAggregateException() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); var grainCall = grain.ThrowsMultipleExceptionsAggregatedInFaultedTask(); try { // use Wait() so that we get the entire AggregateException ('await' would just catch the first inner exception) // Do not use Assert.Throws to avoid any tampering of the AggregateException itself from the test framework grainCall.Wait(); Assert.True(false, "Expected AggregateException"); } catch 
(AggregateException exception) { output.WriteLine(exception.ToString()); // make sure that all exceptions in the task are present, and not just the first one. Assert.Equal(2, exception.InnerExceptions.Count); var firstEx = Assert.IsAssignableFrom<InvalidOperationException>(exception.InnerExceptions[0]); Assert.Equal("Test exception 1", firstEx.Message); var secondEx = Assert.IsAssignableFrom<InvalidOperationException>(exception.InnerExceptions[1]); Assert.Equal("Test exception 2", secondEx.Message); } } [Fact, TestCategory("BVT")] public async Task SynchronousAggregateExceptionThrownShouldResultInFaultedTaskWithOriginalAggregateExceptionUnmodifiedAsInnerException() { IExceptionGrain grain = this.fixture.GrainFactory.GetGrain<IExceptionGrain>(GetRandomGrainId()); // start the grain call but don't await it nor wrap in try/catch, to make sure it doesn't throw synchronously var grainCallTask = grain.ThrowsSynchronousAggregateExceptionWithMultipleInnerExceptions(); // assert that the faulted task has an inner exception of type AggregateException, which should be our original exception var exception = await Assert.ThrowsAsync<AggregateException>(() => grainCallTask); Assert.StartsWith("Test AggregateException message", exception.Message); // make sure that all exceptions in the task are present, and not just the first one. Assert.Equal(2, exception.InnerExceptions.Count); var firstEx = Assert.IsAssignableFrom<InvalidOperationException>(exception.InnerExceptions[0]); Assert.Equal("Test exception 1", firstEx.Message); var secondEx = Assert.IsAssignableFrom<InvalidOperationException>(exception.InnerExceptions[1]); Assert.Equal("Test exception 2", secondEx.Message); } /// <summary> /// Tests that when a client cannot deserialize a request from a grain, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_GrainCallsClient_Request_Deserialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.SendUndeserializableToClient(this.clientObjectRef)); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a client cannot serialize a response to a grain, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_GrainCallsClient_Response_Serialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.GetUnserializableFromClient(this.clientObjectRef)); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a grain cannot deserialize a response from a client, an exception is promptly propagated back to the original caller. 
/// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_GrainCallsClient_Response_Deserialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.GetUndeserializableFromClient(this.clientObjectRef)); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a grain cannot serialize a request to a client, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_GrainCallsClient_Request_Serialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.SendUnserializableToClient(this.clientObjectRef)); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a grain cannot deserialize a request from another grain, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_GrainCallsGrain_Request_Deserialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.SendUndeserializableToOtherSilo()); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a grain cannot serialize a request to another grain, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_GrainCallsGrain_Request_Serialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.SendUnserializableToOtherSilo()); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a grain cannot serialize a response to another grain, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_GrainCallsGrain_Response_Serialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.GetUnserializableFromOtherSilo()); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a grain cannot deserialize a response from another grain, an exception is promptly propagated back to the original caller. 
/// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_GrainCallsGrain_Response_Deserialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.GetUndeserializableFromOtherSilo()); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a grain cannot deserialize a request from a client, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_ClientCallsGrain_Request_Deserialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.SendUndeserializable(new UndeserializableType(32))); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a client cannot serialize a request to a grain, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_ClientCallsGrain_Request_Serialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<NotSupportedException>(() => exceptionGrain.SendUnserializable(new UnserializableType())); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a grain cannot serialize a response to a client, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_ClientCallsGrain_Response_Serialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<Exception>(() => exceptionGrain.GetUnserializable()); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } /// <summary> /// Tests that when a client cannot deserialize a response from a grain, an exception is promptly propagated back to the original caller. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Messaging"), TestCategory("Serialization")] public async Task ExceptionPropagation_ClientCallsGrain_Response_Deserialization_Failure() { for (var i = 0; i < TestIterations; i++) { var exception = await Assert.ThrowsAnyAsync<Exception>(() => exceptionGrain.GetUndeserializable()); Assert.Contains(UndeserializableType.FailureMessage, exception.Message); } } private class MessageSerializationClientObject : IMessageSerializationClientObject { public Task SendUndeserializable(UndeserializableType input) => Task.FromResult(input); public Task SendUnserializable(UnserializableType input) => Task.FromResult(input); public Task<UnserializableType> GetUnserializable() => Task.FromResult(new UnserializableType()); public Task<UndeserializableType> GetUndeserializable() => Task.FromResult(new UndeserializableType(35)); } } }
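// --- Illustrative sketch, independent of Orleans. It shows the framework behavior
// --- the tests above rely on: 'await' on a faulted task rethrows only the first
// --- inner exception, while Task.Wait() surfaces the AggregateException with every
// --- inner exception intact, which is why the skipped "entire AggregateException"
// --- test uses Wait(). Exception messages are placeholders echoing the tests.
using System;
using System.Threading.Tasks;

public static class AwaitVersusWaitSketch
{
    private static Task FaultedWithTwoExceptions()
    {
        var tcs = new TaskCompletionSource<object>();
        tcs.SetException(new Exception[]
        {
            new InvalidOperationException("Test exception 1"),
            new InvalidOperationException("Test exception 2"),
        });
        return tcs.Task;
    }

    public static async Task RunAsync()
    {
        Task faulted = FaultedWithTwoExceptions();

        try { await faulted; }
        catch (InvalidOperationException ex)
        {
            // 'await' unwraps the task's AggregateException and rethrows only the first inner exception.
            Console.WriteLine(ex.Message); // Test exception 1
        }

        try { faulted.Wait(); }
        catch (AggregateException agg)
        {
            // Wait() preserves every inner exception.
            Console.WriteLine(agg.InnerExceptions.Count); // 2
        }
    }
}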
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void CompareLessThanInt32() { var test = new SimpleBinaryOpTest__CompareLessThanInt32(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); // Validates passing a static member works test.RunClsVarScenario(); // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); // Validates passing a local works, using Load test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); // Validates passing the field of a local works test.RunLclFldScenario(); // Validates passing an instance member works test.RunFldScenario(); } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class SimpleBinaryOpTest__CompareLessThanInt32 { private const int VectorSize = 16; private const int ElementCount = VectorSize / sizeof(Int32); private static Int32[] _data1 = new Int32[ElementCount]; private static Int32[] _data2 = new Int32[ElementCount]; private static Vector128<Int32> _clsVar1; private static Vector128<Int32> _clsVar2; private Vector128<Int32> _fld1; private Vector128<Int32> _fld2; private SimpleBinaryOpTest__DataTable<Int32> _dataTable; static SimpleBinaryOpTest__CompareLessThanInt32() { var random = new Random(); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _clsVar1), ref Unsafe.As<Int32, byte>(ref _data2[0]), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _clsVar2), ref Unsafe.As<Int32, byte>(ref _data1[0]), VectorSize); } public SimpleBinaryOpTest__CompareLessThanInt32() { Succeeded = true; var random = new Random(); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref 
_fld1), ref Unsafe.As<Int32, byte>(ref _data1[0]), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _fld2), ref Unsafe.As<Int32, byte>(ref _data2[0]), VectorSize); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } _dataTable = new SimpleBinaryOpTest__DataTable<Int32>(_data1, _data2, new Int32[ElementCount], VectorSize); } public bool IsSupported => Sse2.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { var result = Sse2.CompareLessThan( Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<Int32>>(_dataTable.inArray2Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { var result = Sse2.CompareLessThan( Sse2.LoadVector128((Int32*)(_dataTable.inArray1Ptr)), Sse2.LoadVector128((Int32*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { var result = Sse2.CompareLessThan( Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray1Ptr)), Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareLessThan), new Type[] { typeof(Vector128<Int32>), typeof(Vector128<Int32>) }) .Invoke(null, new object[] { Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<Int32>>(_dataTable.inArray2Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareLessThan), new Type[] { typeof(Vector128<Int32>), typeof(Vector128<Int32>) }) .Invoke(null, new object[] { Sse2.LoadVector128((Int32*)(_dataTable.inArray1Ptr)), Sse2.LoadVector128((Int32*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareLessThan), new Type[] { typeof(Vector128<Int32>), typeof(Vector128<Int32>) }) .Invoke(null, new object[] { Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray1Ptr)), Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { var result = Sse2.CompareLessThan( _clsVar1, _clsVar2 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } public void RunLclVarScenario_UnsafeRead() { var left = Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr); var right = Unsafe.Read<Vector128<Int32>>(_dataTable.inArray2Ptr); var result = Sse2.CompareLessThan(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { var left 
= Sse2.LoadVector128((Int32*)(_dataTable.inArray1Ptr)); var right = Sse2.LoadVector128((Int32*)(_dataTable.inArray2Ptr)); var result = Sse2.CompareLessThan(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { var left = Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray1Ptr)); var right = Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray2Ptr)); var result = Sse2.CompareLessThan(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclFldScenario() { var test = new SimpleBinaryOpTest__CompareLessThanInt32(); var result = Sse2.CompareLessThan(test._fld1, test._fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunFldScenario() { var result = Sse2.CompareLessThan(_fld1, _fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr); } public void RunUnsupportedScenario() { Succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { Succeeded = true; } } private void ValidateResult(Vector128<Int32> left, Vector128<Int32> right, void* result, [CallerMemberName] string method = "") { Int32[] inArray1 = new Int32[ElementCount]; Int32[] inArray2 = new Int32[ElementCount]; Int32[] outArray = new Int32[ElementCount]; Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left); Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "") { Int32[] inArray1 = new Int32[ElementCount]; Int32[] inArray2 = new Int32[ElementCount]; Int32[] outArray = new Int32[ElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(Int32[] left, Int32[] right, Int32[] result, [CallerMemberName] string method = "") { if (result[0] != ((left[0] < right[0]) ? unchecked((int)(-1)) : 0)) { Succeeded = false; } else { for (var i = 1; i < left.Length; i++) { if (result[i] != ((left[i] < right[i]) ? unchecked((int)(-1)) : 0)) { Succeeded = false; break; } } } if (!Succeeded) { Console.WriteLine($"{nameof(Sse2)}.{nameof(Sse2.CompareLessThan)}<Int32>: {method} failed:"); Console.WriteLine($" left: ({string.Join(", ", left)})"); Console.WriteLine($" right: ({string.Join(", ", right)})"); Console.WriteLine($" result: ({string.Join(", ", result)})"); Console.WriteLine(); } } } }
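// --- Illustrative sketch, not generated from the test template above. It is a
// --- scalar reference for the element-wise semantics that ValidateResult checks:
// --- CompareLessThan yields an all-ones mask (-1) where left[i] < right[i] and 0
// --- otherwise. The sample inputs are placeholders.
using System;

public static class CompareLessThanReferenceSketch
{
    public static int[] CompareLessThanScalar(int[] left, int[] right)
    {
        if (left.Length != right.Length)
            throw new ArgumentException("Operands must have the same element count.");

        var result = new int[left.Length];
        for (int i = 0; i < left.Length; i++)
        {
            // All bits set when the comparison holds, otherwise zero,
            // matching the per-element mask the test validates.
            result[i] = left[i] < right[i] ? -1 : 0;
        }
        return result;
    }

    public static void Run()
    {
        int[] left  = { 1, 5, -3, 7 };
        int[] right = { 2, 5, -4, 9 };
        Console.WriteLine(string.Join(", ", CompareLessThanScalar(left, right))); // -1, 0, 0, -1
    }
}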
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void AddInt32() { var test = new SimpleBinaryOpTest__AddInt32(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); // Validates passing a static member works test.RunClsVarScenario(); // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); // Validates passing a local works, using Load test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); // Validates passing the field of a local works test.RunLclFldScenario(); // Validates passing an instance member works test.RunFldScenario(); } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class SimpleBinaryOpTest__AddInt32 { private const int VectorSize = 32; private const int ElementCount = VectorSize / sizeof(Int32); private static Int32[] _data1 = new Int32[ElementCount]; private static Int32[] _data2 = new Int32[ElementCount]; private static Vector256<Int32> _clsVar1; private static Vector256<Int32> _clsVar2; private Vector256<Int32> _fld1; private Vector256<Int32> _fld2; private SimpleBinaryOpTest__DataTable<Int32> _dataTable; static SimpleBinaryOpTest__AddInt32() { var random = new Random(); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int32>, byte>(ref _clsVar1), ref Unsafe.As<Int32, byte>(ref _data2[0]), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int32>, byte>(ref _clsVar2), ref Unsafe.As<Int32, byte>(ref _data1[0]), VectorSize); } public SimpleBinaryOpTest__AddInt32() { Succeeded = true; var random = new Random(); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int32>, byte>(ref _fld1), ref Unsafe.As<Int32, byte>(ref _data1[0]), 
VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int32>, byte>(ref _fld2), ref Unsafe.As<Int32, byte>(ref _data2[0]), VectorSize); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } _dataTable = new SimpleBinaryOpTest__DataTable<Int32>(_data1, _data2, new Int32[ElementCount], VectorSize); } public bool IsSupported => Avx2.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { var result = Avx2.Add( Unsafe.Read<Vector256<Int32>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector256<Int32>>(_dataTable.inArray2Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { var result = Avx2.Add( Avx.LoadVector256((Int32*)(_dataTable.inArray1Ptr)), Avx.LoadVector256((Int32*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { var result = Avx2.Add( Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray1Ptr)), Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { var result = typeof(Avx2).GetMethod(nameof(Avx2.Add), new Type[] { typeof(Vector256<Int32>), typeof(Vector256<Int32>) }) .Invoke(null, new object[] { Unsafe.Read<Vector256<Int32>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector256<Int32>>(_dataTable.inArray2Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { var result = typeof(Avx2).GetMethod(nameof(Avx2.Add), new Type[] { typeof(Vector256<Int32>), typeof(Vector256<Int32>) }) .Invoke(null, new object[] { Avx.LoadVector256((Int32*)(_dataTable.inArray1Ptr)), Avx.LoadVector256((Int32*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { var result = typeof(Avx2).GetMethod(nameof(Avx2.Add), new Type[] { typeof(Vector256<Int32>), typeof(Vector256<Int32>) }) .Invoke(null, new object[] { Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray1Ptr)), Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { var result = Avx2.Add( _clsVar1, _clsVar2 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } public void RunLclVarScenario_UnsafeRead() { var left = Unsafe.Read<Vector256<Int32>>(_dataTable.inArray1Ptr); var right = Unsafe.Read<Vector256<Int32>>(_dataTable.inArray2Ptr); var result = Avx2.Add(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { var left = Avx.LoadVector256((Int32*)(_dataTable.inArray1Ptr)); var right = Avx.LoadVector256((Int32*)(_dataTable.inArray2Ptr)); var result = Avx2.Add(left, 
right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { var left = Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray1Ptr)); var right = Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray2Ptr)); var result = Avx2.Add(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclFldScenario() { var test = new SimpleBinaryOpTest__AddInt32(); var result = Avx2.Add(test._fld1, test._fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunFldScenario() { var result = Avx2.Add(_fld1, _fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr); } public void RunUnsupportedScenario() { Succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { Succeeded = true; } } private void ValidateResult(Vector256<Int32> left, Vector256<Int32> right, void* result, [CallerMemberName] string method = "") { Int32[] inArray1 = new Int32[ElementCount]; Int32[] inArray2 = new Int32[ElementCount]; Int32[] outArray = new Int32[ElementCount]; Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left); Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "") { Int32[] inArray1 = new Int32[ElementCount]; Int32[] inArray2 = new Int32[ElementCount]; Int32[] outArray = new Int32[ElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(Int32[] left, Int32[] right, Int32[] result, [CallerMemberName] string method = "") { if ((int)(left[0] + right[0]) != result[0]) { Succeeded = false; } else { for (var i = 1; i < left.Length; i++) { if ((int)(left[i] + right[i]) != result[i]) { Succeeded = false; break; } } } if (!Succeeded) { Console.WriteLine($"{nameof(Avx2)}.{nameof(Avx2.Add)}<Int32>: {method} failed:"); Console.WriteLine($" left: ({string.Join(", ", left)})"); Console.WriteLine($" right: ({string.Join(", ", right)})"); Console.WriteLine($" result: ({string.Join(", ", result)})"); Console.WriteLine(); } } } }
#region Namespaces using System; using System.ComponentModel; using System.Data; using System.Xml; #endregion //Namespaces namespace Epi.Fields { /// <summary> /// Related view field /// </summary> public class RelatedViewField : FieldWithoutSeparatePrompt, IFieldWithCheckCodeClick //RenderableField { #region Private Members private XmlElement viewElement; private XmlElement fieldElement; private XmlNode fieldNode; private string condition = string.Empty; private bool shouldReturnToParent; private BackgroundWorker _updater; private BackgroundWorker _inserter; #endregion Private Members #region Constructors /// <summary> /// Constructor /// </summary> /// <param name="page">The page the field belongs to</param> public RelatedViewField(Page page) : base(page) { } /// <summary> /// Constructor /// </summary> /// <param name="view">The view the field belongs to</param> public RelatedViewField(View view) : base(view) { } /// <summary> /// Constructor /// </summary> /// <param name="page">The page the field belongs to</param> /// <param name="viewElement">Xml Element representation of Related View Field</param> public RelatedViewField(Page page, XmlElement viewElement) : base(page) { this.viewElement = viewElement; this.Page = page; } /// <summary> /// Constructor /// </summary> /// <param name="view">The view the field belongs to</param> /// <param name="fieldNode"></param> public RelatedViewField(View view, XmlNode fieldNode) : base(view) { this.fieldNode = fieldNode; this.view.Project.Metadata.GetFieldData(this, this.fieldNode); } /// <summary> /// Load a Related View Field from a <see cref="System.Data.DataRow"/> /// </summary> /// <param name="row"></param> public override void LoadFromRow(DataRow row) { base.LoadFromRow(row); condition = row["RelateCondition"].ToString(); //checkCodeBefore = row["ControlBeforeCheckCode"].ToString(); //checkCodeAfter = row["ControlAfterCheckCode"].ToString(); shouldReturnToParent = (bool)row["ShouldReturnToParent"]; if (row["RelatedViewId"] != System.DBNull.Value) { relatedViewID = (int)row["RelatedViewId"]; } } public RelatedViewField Clone() { RelatedViewField clone = (RelatedViewField)this.MemberwiseClone(); base.AssignMembers(clone); return clone; } #endregion #region Public Events /// <summary> /// Occurs when a user requests to see the related view of this field /// </summary> public event ChildViewRequestedEventHandler ChildViewRequested; #endregion #region Public Properties /// <summary> /// Returns field type /// </summary> public override MetaFieldType FieldType { get { return MetaFieldType.Relate; } } /// <summary> /// Gets the related view of the field /// </summary> public View ChildView { get { return GetMetadata().GetChildView(this); } } /// <summary> /// Gets/sets the condition for going to related view /// </summary> public string Condition { get { return (condition); } set { condition = value; } } /// <summary> /// Gets/sets whether the related view should return to its parent /// </summary> public bool ShouldReturnToParent { get { return (shouldReturnToParent); } set { shouldReturnToParent = value; } } private int relatedViewID; /// <summary> /// Id of view list as related to this field /// </summary> public int RelatedViewID { get { return relatedViewID; } set { relatedViewID = value; } } //isu6 - Implementing checkcode for Related view field. 
/// <summary> /// CheckCode After property for all DropDown list fields /// </summary> public string CheckCodeAfter { get { return (checkCodeAfter); } set { checkCodeAfter = value; } } /// <summary> /// CheckCode Before property for all DropDown list fields /// </summary> public string CheckCodeClick { get { return (checkCodeBefore); } set { checkCodeBefore = value; } } #endregion #region Protected Properties /// <summary> /// Check Code After member variable /// </summary> protected string checkCodeAfter = string.Empty; /// <summary> /// Check Code Before member variable /// </summary> protected string checkCodeBefore = string.Empty; #endregion Protected Properties #region Public Methods /// <summary> /// Deletes the field /// </summary> public override void Delete() { View childView = this.GetProject().GetViewById(RelatedViewID); if (childView != null) { childView.IsRelatedView = false; GetMetadata().UpdateView(childView); } GetMetadata().DeleteField(this); view.MustRefreshFieldCollection = true; } /// <summary> /// The view element of the field /// </summary> public XmlElement ViewElement { get { return viewElement; } set { viewElement = value; } } #endregion #region Protected Methods /// <summary> /// Inserts the field to the database /// </summary> protected override void InsertField() { this.Id = GetMetadata().CreateField(this); base.OnFieldAdded(); } /// <summary> /// Update the field to the database /// </summary> protected override void UpdateField() { GetMetadata().UpdateField(this); } ///// <summary> ///// Inserts the field to the database ///// </summary> //protected override void InsertField() //{ // insertStarted = true; // _inserter = new BackgroundWorker(); // _inserter.DoWork += new DoWorkEventHandler(inserter_DoWork); // _inserter.RunWorkerCompleted += new RunWorkerCompletedEventHandler(_inserter_RunWorkerCompleted); // _inserter.RunWorkerAsync(); //} //void _inserter_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e) //{ // OnFieldInserted(this); //} //void inserter_DoWork(object sender, DoWorkEventArgs e) //{ // fieldsWaitingToUpdate++; // lock (view.FieldLockToken) // { // this.Id = GetMetadata().CreateField(this); // base.OnFieldAdded(); // fieldsWaitingToUpdate--; // } //} ///// <summary> ///// Update the field to the database ///// </summary> //protected override void UpdateField() //{ // _updater = new BackgroundWorker(); // _updater.DoWork += new DoWorkEventHandler(DoWork); // _updater.RunWorkerCompleted += new RunWorkerCompletedEventHandler(_updater_RunWorkerCompleted); // _updater.RunWorkerAsync(); //} //void _updater_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e) //{ // OnFieldUpdated(this); //} //private void DoWork(object sender, DoWorkEventArgs e) //{ // fieldsWaitingToUpdate++; // lock (view.FieldLockToken) // { // GetMetadata().UpdateField(this); // fieldsWaitingToUpdate--; // } //} #endregion Protected Methods #region Event Handlers /// <summary> /// Handles the click event of the Edit Field menu item /// </summary> /// <param name="sender">.NET supplied object</param> /// <param name="e">.NET supplied event parameters</param> private void MnuEditRelate_Click(object sender, EventArgs e) { //base.RaiseEventFieldDefinitionRequested(); } /// <summary> /// Handles the click event of the Related View menu item /// </summary> /// <param name="sender">.NET supplied object</param> /// <param name="e">.NET supplied event parameters</param> private void MnuRelatedView_Click(object sender, EventArgs e) { if (ChildViewRequested != 
null) { ChildViewRequested(this, new ChildViewRequestedEventArgs(ChildView)); } } #endregion Event Handlers #region Private Methods /// <summary> /// Creates an Xml attribute and sets a value. /// (NOTE: fieldElement must be set to viewElement.OwnerDocument.CreateElement("Field").) /// </summary> /// <param name="Attribute">Name of the Xml attribute to create.</param> /// <param name="Value">Value to assign to the attribute.</param> private void AppendAttributeValue(string Attribute, string Value) { try { XmlAttribute xmlAttribute = viewElement.OwnerDocument.CreateAttribute(Attribute); xmlAttribute.Value = Value; if (fieldElement == null) fieldElement = viewElement.OwnerDocument.CreateElement("Field"); fieldElement.Attributes.Append(xmlAttribute); } catch (ArgumentException ex) { throw new GeneralException(SharedStrings.EXCEPTION_OCCURRED, ex); } } #endregion //Private Methods } }
using System; using Microsoft.Data.Entity; using Microsoft.Data.Entity.Infrastructure; using Microsoft.Data.Entity.Metadata; using Microsoft.Data.Entity.Migrations; using OttoMail.Models; namespace OttoMail.Migrations { [DbContext(typeof(ApplicationDbContext))] [Migration("20160518205113_FirstNameLastName2")] partial class FirstNameLastName2 { protected override void BuildTargetModel(ModelBuilder modelBuilder) { modelBuilder .HasAnnotation("ProductVersion", "7.0.0-rc1-16348") .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRole", b => { b.Property<string>("Id"); b.Property<string>("ConcurrencyStamp") .IsConcurrencyToken(); b.Property<string>("Name") .HasAnnotation("MaxLength", 256); b.Property<string>("NormalizedName") .HasAnnotation("MaxLength", 256); b.HasKey("Id"); b.HasIndex("NormalizedName") .HasAnnotation("Relational:Name", "RoleNameIndex"); b.HasAnnotation("Relational:TableName", "AspNetRoles"); }); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRoleClaim<string>", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ClaimType"); b.Property<string>("ClaimValue"); b.Property<string>("RoleId") .IsRequired(); b.HasKey("Id"); b.HasAnnotation("Relational:TableName", "AspNetRoleClaims"); }); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserClaim<string>", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ClaimType"); b.Property<string>("ClaimValue"); b.Property<string>("UserId") .IsRequired(); b.HasKey("Id"); b.HasAnnotation("Relational:TableName", "AspNetUserClaims"); }); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserLogin<string>", b => { b.Property<string>("LoginProvider"); b.Property<string>("ProviderKey"); b.Property<string>("ProviderDisplayName"); b.Property<string>("UserId") .IsRequired(); b.HasKey("LoginProvider", "ProviderKey"); b.HasAnnotation("Relational:TableName", "AspNetUserLogins"); }); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserRole<string>", b => { b.Property<string>("UserId"); b.Property<string>("RoleId"); b.HasKey("UserId", "RoleId"); b.HasAnnotation("Relational:TableName", "AspNetUserRoles"); }); modelBuilder.Entity("OttoMail.Models.ApplicationUser", b => { b.Property<string>("Id"); b.Property<int>("AccessFailedCount"); b.Property<string>("ConcurrencyStamp") .IsConcurrencyToken(); b.Property<string>("Email") .HasAnnotation("MaxLength", 256); b.Property<bool>("EmailConfirmed"); b.Property<string>("FirstName"); b.Property<string>("LastName"); b.Property<bool>("LockoutEnabled"); b.Property<DateTimeOffset?>("LockoutEnd"); b.Property<string>("NormalizedEmail") .HasAnnotation("MaxLength", 256); b.Property<string>("NormalizedUserName") .HasAnnotation("MaxLength", 256); b.Property<string>("PasswordHash"); b.Property<string>("PhoneNumber"); b.Property<bool>("PhoneNumberConfirmed"); b.Property<string>("SecurityStamp"); b.Property<bool>("TwoFactorEnabled"); b.Property<string>("UserName") .HasAnnotation("MaxLength", 256); b.HasKey("Id"); b.HasIndex("NormalizedEmail") .HasAnnotation("Relational:Name", "EmailIndex"); b.HasIndex("NormalizedUserName") .HasAnnotation("Relational:Name", "UserNameIndex"); b.HasAnnotation("Relational:TableName", "AspNetUsers"); }); modelBuilder.Entity("OttoMail.Models.Email", b => { b.Property<int>("EmailId") .ValueGeneratedOnAdd(); b.Property<string>("Body"); 
b.Property<bool>("Checked"); b.Property<DateTime>("Date"); b.Property<bool>("Read"); b.Property<string>("Sender"); b.Property<string>("Subject"); b.Property<string>("UserId"); b.HasKey("EmailId"); b.HasAnnotation("Relational:TableName", "Emails"); }); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRoleClaim<string>", b => { b.HasOne("Microsoft.AspNet.Identity.EntityFramework.IdentityRole") .WithMany() .HasForeignKey("RoleId"); }); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserClaim<string>", b => { b.HasOne("OttoMail.Models.ApplicationUser") .WithMany() .HasForeignKey("UserId"); }); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserLogin<string>", b => { b.HasOne("OttoMail.Models.ApplicationUser") .WithMany() .HasForeignKey("UserId"); }); modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserRole<string>", b => { b.HasOne("Microsoft.AspNet.Identity.EntityFramework.IdentityRole") .WithMany() .HasForeignKey("RoleId"); b.HasOne("OttoMail.Models.ApplicationUser") .WithMany() .HasForeignKey("UserId"); }); modelBuilder.Entity("OttoMail.Models.Email", b => { b.HasOne("OttoMail.Models.ApplicationUser") .WithMany() .HasForeignKey("UserId"); }); } } }
/* Generated SBE (Simple Binary Encoding) message codec */ using System; using System.Text; using System.Collections.Generic; using Adaptive.Agrona; namespace Adaptive.Archiver.Codecs { public class TaggedReplicateRequestEncoder { public const ushort BLOCK_LENGTH = 52; public const ushort TEMPLATE_ID = 62; public const ushort SCHEMA_ID = 101; public const ushort SCHEMA_VERSION = 6; private TaggedReplicateRequestEncoder _parentMessage; private IMutableDirectBuffer _buffer; protected int _offset; protected int _limit; public TaggedReplicateRequestEncoder() { _parentMessage = this; } public ushort SbeBlockLength() { return BLOCK_LENGTH; } public ushort SbeTemplateId() { return TEMPLATE_ID; } public ushort SbeSchemaId() { return SCHEMA_ID; } public ushort SbeSchemaVersion() { return SCHEMA_VERSION; } public string SbeSemanticType() { return ""; } public IMutableDirectBuffer Buffer() { return _buffer; } public int Offset() { return _offset; } public TaggedReplicateRequestEncoder Wrap(IMutableDirectBuffer buffer, int offset) { this._buffer = buffer; this._offset = offset; Limit(offset + BLOCK_LENGTH); return this; } public TaggedReplicateRequestEncoder WrapAndApplyHeader( IMutableDirectBuffer buffer, int offset, MessageHeaderEncoder headerEncoder) { headerEncoder .Wrap(buffer, offset) .BlockLength(BLOCK_LENGTH) .TemplateId(TEMPLATE_ID) .SchemaId(SCHEMA_ID) .Version(SCHEMA_VERSION); return Wrap(buffer, offset + MessageHeaderEncoder.ENCODED_LENGTH); } public int EncodedLength() { return _limit - _offset; } public int Limit() { return _limit; } public void Limit(int limit) { this._limit = limit; } public static int ControlSessionIdEncodingOffset() { return 0; } public static int ControlSessionIdEncodingLength() { return 8; } public static long ControlSessionIdNullValue() { return -9223372036854775808L; } public static long ControlSessionIdMinValue() { return -9223372036854775807L; } public static long ControlSessionIdMaxValue() { return 9223372036854775807L; } public TaggedReplicateRequestEncoder ControlSessionId(long value) { _buffer.PutLong(_offset + 0, value, ByteOrder.LittleEndian); return this; } public static int CorrelationIdEncodingOffset() { return 8; } public static int CorrelationIdEncodingLength() { return 8; } public static long CorrelationIdNullValue() { return -9223372036854775808L; } public static long CorrelationIdMinValue() { return -9223372036854775807L; } public static long CorrelationIdMaxValue() { return 9223372036854775807L; } public TaggedReplicateRequestEncoder CorrelationId(long value) { _buffer.PutLong(_offset + 8, value, ByteOrder.LittleEndian); return this; } public static int SrcRecordingIdEncodingOffset() { return 16; } public static int SrcRecordingIdEncodingLength() { return 8; } public static long SrcRecordingIdNullValue() { return -9223372036854775808L; } public static long SrcRecordingIdMinValue() { return -9223372036854775807L; } public static long SrcRecordingIdMaxValue() { return 9223372036854775807L; } public TaggedReplicateRequestEncoder SrcRecordingId(long value) { _buffer.PutLong(_offset + 16, value, ByteOrder.LittleEndian); return this; } public static int DstRecordingIdEncodingOffset() { return 24; } public static int DstRecordingIdEncodingLength() { return 8; } public static long DstRecordingIdNullValue() { return -9223372036854775808L; } public static long DstRecordingIdMinValue() { return -9223372036854775807L; } public static long DstRecordingIdMaxValue() { return 9223372036854775807L; } public TaggedReplicateRequestEncoder DstRecordingId(long value) 
{ _buffer.PutLong(_offset + 24, value, ByteOrder.LittleEndian); return this; } public static int ChannelTagIdEncodingOffset() { return 32; } public static int ChannelTagIdEncodingLength() { return 8; } public static long ChannelTagIdNullValue() { return -9223372036854775808L; } public static long ChannelTagIdMinValue() { return -9223372036854775807L; } public static long ChannelTagIdMaxValue() { return 9223372036854775807L; } public TaggedReplicateRequestEncoder ChannelTagId(long value) { _buffer.PutLong(_offset + 32, value, ByteOrder.LittleEndian); return this; } public static int SubscriptionTagIdEncodingOffset() { return 40; } public static int SubscriptionTagIdEncodingLength() { return 8; } public static long SubscriptionTagIdNullValue() { return -9223372036854775808L; } public static long SubscriptionTagIdMinValue() { return -9223372036854775807L; } public static long SubscriptionTagIdMaxValue() { return 9223372036854775807L; } public TaggedReplicateRequestEncoder SubscriptionTagId(long value) { _buffer.PutLong(_offset + 40, value, ByteOrder.LittleEndian); return this; } public static int SrcControlStreamIdEncodingOffset() { return 48; } public static int SrcControlStreamIdEncodingLength() { return 4; } public static int SrcControlStreamIdNullValue() { return -2147483648; } public static int SrcControlStreamIdMinValue() { return -2147483647; } public static int SrcControlStreamIdMaxValue() { return 2147483647; } public TaggedReplicateRequestEncoder SrcControlStreamId(int value) { _buffer.PutInt(_offset + 48, value, ByteOrder.LittleEndian); return this; } public static int SrcControlChannelId() { return 8; } public static string SrcControlChannelCharacterEncoding() { return "US-ASCII"; } public static string SrcControlChannelMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static int SrcControlChannelHeaderLength() { return 4; } public TaggedReplicateRequestEncoder PutSrcControlChannel(IDirectBuffer src, int srcOffset, int length) { if (length > 1073741824) { throw new InvalidOperationException("length > maxValue for type: " + length); } int headerLength = 4; int limit = _parentMessage.Limit(); _parentMessage.Limit(limit + headerLength + length); _buffer.PutInt(limit, unchecked((int)length), ByteOrder.LittleEndian); _buffer.PutBytes(limit + headerLength, src, srcOffset, length); return this; } public TaggedReplicateRequestEncoder PutSrcControlChannel(byte[] src, int srcOffset, int length) { if (length > 1073741824) { throw new InvalidOperationException("length > maxValue for type: " + length); } int headerLength = 4; int limit = _parentMessage.Limit(); _parentMessage.Limit(limit + headerLength + length); _buffer.PutInt(limit, unchecked((int)length), ByteOrder.LittleEndian); _buffer.PutBytes(limit + headerLength, src, srcOffset, length); return this; } public TaggedReplicateRequestEncoder SrcControlChannel(string value) { int length = value.Length; if (length > 1073741824) { throw new InvalidOperationException("length > maxValue for type: " + length); } int headerLength = 4; int limit = _parentMessage.Limit(); _parentMessage.Limit(limit + headerLength + length); _buffer.PutInt(limit, unchecked((int)length), ByteOrder.LittleEndian); _buffer.PutStringWithoutLengthAscii(limit + headerLength, value); return this; } public static int LiveDestinationId() { return 9; } 
public static string LiveDestinationCharacterEncoding() { return "US-ASCII"; } public static string LiveDestinationMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static int LiveDestinationHeaderLength() { return 4; } public TaggedReplicateRequestEncoder PutLiveDestination(IDirectBuffer src, int srcOffset, int length) { if (length > 1073741824) { throw new InvalidOperationException("length > maxValue for type: " + length); } int headerLength = 4; int limit = _parentMessage.Limit(); _parentMessage.Limit(limit + headerLength + length); _buffer.PutInt(limit, unchecked((int)length), ByteOrder.LittleEndian); _buffer.PutBytes(limit + headerLength, src, srcOffset, length); return this; } public TaggedReplicateRequestEncoder PutLiveDestination(byte[] src, int srcOffset, int length) { if (length > 1073741824) { throw new InvalidOperationException("length > maxValue for type: " + length); } int headerLength = 4; int limit = _parentMessage.Limit(); _parentMessage.Limit(limit + headerLength + length); _buffer.PutInt(limit, unchecked((int)length), ByteOrder.LittleEndian); _buffer.PutBytes(limit + headerLength, src, srcOffset, length); return this; } public TaggedReplicateRequestEncoder LiveDestination(string value) { int length = value.Length; if (length > 1073741824) { throw new InvalidOperationException("length > maxValue for type: " + length); } int headerLength = 4; int limit = _parentMessage.Limit(); _parentMessage.Limit(limit + headerLength + length); _buffer.PutInt(limit, unchecked((int)length), ByteOrder.LittleEndian); _buffer.PutStringWithoutLengthAscii(limit + headerLength, value); return this; } public override string ToString() { return AppendTo(new StringBuilder(100)).ToString(); } public StringBuilder AppendTo(StringBuilder builder) { TaggedReplicateRequestDecoder writer = new TaggedReplicateRequestDecoder(); writer.Wrap(_buffer, _offset, BLOCK_LENGTH, SCHEMA_VERSION); return writer.AppendTo(builder); } } }
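// A hedged usage sketch for the generated encoder above (not part of the generated codec): the
// fixed-length fields are written into the 52-byte block, then the variable-length ASCII fields are
// appended in schema order (SrcControlChannel, then LiveDestination), each prefixed by a 4-byte
// little-endian length header. UnsafeBuffer is assumed from the Adaptive.Agrona package; everything
// else comes from the encoder and the MessageHeaderEncoder referenced above.
namespace Adaptive.Archiver.Codecs.Sketches
{
    using Adaptive.Agrona.Concurrent;

    internal static class TaggedReplicateRequestEncoderUsage
    {
        // Returns the total number of encoded bytes (header + fixed block + variable-length fields).
        internal static int Encode(byte[] backingArray)
        {
            var buffer = new UnsafeBuffer(backingArray); // assumed Agrona buffer wrapper over a byte[]
            var headerEncoder = new MessageHeaderEncoder();
            var encoder = new TaggedReplicateRequestEncoder();

            encoder.WrapAndApplyHeader(buffer, 0, headerEncoder)
                .ControlSessionId(1L)
                .CorrelationId(2L)
                .SrcRecordingId(3L)
                .DstRecordingId(-1L)
                .ChannelTagId(0L)
                .SubscriptionTagId(0L)
                .SrcControlStreamId(10);

            // Variable-length fields must be encoded in declaration order.
            encoder.SrcControlChannel("aeron:udp?endpoint=localhost:8010");
            encoder.LiveDestination("");

            return MessageHeaderEncoder.ENCODED_LENGTH + encoder.EncodedLength();
        }
    }
}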
using System; using System.Threading.Tasks; using Moq; using Xunit; using ISTS.Application.Common; using ISTS.Application.Sessions; using ISTS.Domain.Rooms; using ISTS.Domain.Common; using ISTS.Domain.Sessions; namespace ISTS.Application.Test.Sessions { public class SessionChargeCalculatorServiceTests { private readonly Mock<ISessionRepository> _sessionRepository; private readonly Mock<IRoomRepository> _roomRepository; private readonly ISessionChargeCalculatorService _sessionChargeCalculatorService; private static Guid StudioId = Guid.NewGuid(); private static Guid RoomId = Guid.NewGuid(); private static Guid RoomFunctionId = Guid.NewGuid(); private static DateTime Start = DateTime.Now; private static DateTime End = Start.AddHours(2); private static DateRange Schedule = DateRange.Create(Start, End); public SessionChargeCalculatorServiceTests() { _sessionRepository = new Mock<ISessionRepository>(); _roomRepository = new Mock<IRoomRepository>(); _sessionChargeCalculatorService = new SessionChargeCalculatorService( _sessionRepository.Object, _roomRepository.Object); } [Fact] public async void CalculateTotalCharge_Returns_Zero_When_RoomFunctionId_Is_Null() { var session = Session.Create( RoomId, Schedule, null); session.SetActualStartTime(Start); session.SetActualEndTime(End); _sessionRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(session)); var result = await _sessionChargeCalculatorService.CalculateTotalCharge(session.Id); Assert.Equal(0, result); } [Fact] public async void CalculateTotalCharge_Throws_When_Session_Not_Started() { var session = Session.Create( RoomId, Schedule, RoomFunctionId); _sessionRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(session)); var ex = await Assert.ThrowsAsync<DataValidationException>(() => _sessionChargeCalculatorService.CalculateTotalCharge(session.Id)); Assert.IsType<DataValidationException>(ex); Assert.NotNull(ex.InnerException); Assert.IsType<SessionNotStartedException>(ex.InnerException); } [Fact] public async void CalculateTotalCharge_Throws_When_Session_Not_Ended() { var session = Session.Create( RoomId, Schedule, RoomFunctionId); session.SetActualStartTime(Start); _sessionRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(session)); var ex = await Assert.ThrowsAsync<DataValidationException>(() => _sessionChargeCalculatorService.CalculateTotalCharge(session.Id)); Assert.IsType<DataValidationException>(ex); Assert.NotNull(ex.InnerException); Assert.IsType<SessionNotEndedException>(ex.InnerException); } [Fact] public async void CalculateTotalCharge_Hourly_Returns_MinimumCharge_If_Total_Less_Than_Minimum() { var minimum = 50; var room = Room.Create(StudioId, "Room"); var function = room.AddRoomFunction("Vocal tracking", "Tracking vocals for a song"); function.SetBillingRate(BillingRate.HourlyType, 10, minimum); var session = Session.Create( RoomId, Schedule, function.Id); session.SetActualStartTime(Start); session.SetActualEndTime(End); _sessionRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(session)); _roomRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(room)); var result = await _sessionChargeCalculatorService.CalculateTotalCharge(session.Id); Assert.Equal(minimum, result); } [Fact] public async void CalculateTotalCharge_Hourly_Returns_Total() { var minimum = 25; var expectedTotal = 2 * 25; var room = Room.Create(StudioId, "Room"); var function = room.AddRoomFunction("Vocal tracking", "Tracking vocals for a 
song"); function.SetBillingRate(BillingRate.HourlyType, 25, minimum); var session = Session.Create( RoomId, Schedule, function.Id); session.SetActualStartTime(Start); session.SetActualEndTime(End); _sessionRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(session)); _roomRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(room)); var result = await _sessionChargeCalculatorService.CalculateTotalCharge(session.Id); Assert.Equal(expectedTotal, result); } [Fact] public async void CalculateTotalCharge_FlatRate_Returns_FlatRate_Total() { var expectedTotal = 100; var room = Room.Create(StudioId, "Room"); var function = room.AddRoomFunction("Vocal tracking", "Tracking vocals for a song"); function.SetBillingRate(BillingRate.FlatRateType, expectedTotal, 0); var session = Session.Create( RoomId, Schedule, function.Id); session.SetActualStartTime(Start); session.SetActualEndTime(End); _sessionRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(session)); _roomRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(room)); var result = await _sessionChargeCalculatorService.CalculateTotalCharge(session.Id); Assert.Equal(expectedTotal, result); } [Fact] public async void CalculateTotalCharge_None_Returns_Zero() { var expectedTotal = 0; var room = Room.Create(StudioId, "Room"); var function = room.AddRoomFunction("Vocal tracking", "Tracking vocals for a song"); var session = Session.Create( RoomId, Schedule, function.Id); session.SetActualStartTime(Start); session.SetActualEndTime(End); _sessionRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(session)); _roomRepository .Setup(r => r.GetAsync(It.IsAny<Guid>())) .Returns(Task.FromResult(room)); var result = await _sessionChargeCalculatorService.CalculateTotalCharge(session.Id); Assert.Equal(expectedTotal, result); } } }
exec("./verbHelp.gui"); new ScriptObject(Verbs) { // Allows us to use the onEvent callback. class = StateMachine; // The null state is only necessary to we have an enterReady callback when // the state machine is first 'activated' (i.e. given a ready event). state = null; transition[null, ready] = ready; // Catch these events from every state and return to ready. transition[_, finish] = ready; transition[_, cancel] = ready; // Verbs are your only transitions out of the ready state. transition[ready, attack] = attackTarget; transition[ready, stop] = stop; transition[ready, move] = selectDirection; transition[ready, cover] = coverTarget; // Must target someone for these verbs. transition[attackTarget, enemyTargeted] = attack; transition[coverTarget, coverTargeted] = cover; // Location targeting. transition[selectDirection, directionSelected] = move; }; // Utility function for defining verbs. function Verbs::define(%this, %key, %verb) { %this.map.bindCmd(keyboard, %key, "Verbs.onEvent(" @ %verb @ ");", ""); %this.helpText[%verb] = %key @ " " @ %verb @ "\n"; } function Verbs::onStart(%this) { GameGroup.add(Verbs); PlayGui.add(VerbHelpDlg); // Respond to keypresses. %this.map = new ActionMap(); %this.globalMap = new ActionMap(); %this.directionMap = new ActionMap(); // Add some verbs that allow the knights to perform actions. %this.define("a", "Attack"); %this.define("s", "Stop"); %this.define("c", "Cover"); %this.define("m", "Move"); // Keyboard actions that should be available in any state. %this.globalMap.bindCmd(keyboard, "ctrl c", "Verbs.onEvent(cancel);"); %this.globalMap.bindCmd(keyboard, "?", "Verbs.toggleHelp();"); %this.globalMap.push(); // Direction selection actions. foreach$(%d in "w e d c s z a q") { %this.directionMap.bindCmd(keyboard, %d, "Verbs.direction ="@%d@"; Verbs.onEvent(directionSelected);"); } %this.helpText[directionSelected] = "w Away\n" @ "a Left\n" @ "s Towards\n" @ "d Right\n\n" @ "qezc Others"; %this.helpText[coverTargeted] = %this.helpText[enemyTargeted] = "j Closest\n" @ "k Next closest\n" @ " etc...\n"; // Start up the state machine. %this.onEvent(ready); } function Verbs::onEnd(%this) { %this.map.delete(); %this.globalMap.delete(); %this.directionMap.delete(); } //----------------------------------------------------------------------------- function Verbs::onEvent(%this, %event) { // Call the regular parent function first, so that the state has changed // before we try to get the help text for it. Parent::onEvent(%this, %event); %this.updateHelpDlg(); } function Verbs::updateHelpDlg(%this) { VerbHelpText.setText(""); // We're going to iterate over every dynamic field on the state machine. These // fields include all the state transitions, the ones we're interested in. // What we want to find is whether, for each transition from our current state, // there is help text for that transition. If there is, we add it to the help // gui. %len = %this.getDynamicFieldCount(); for(%i = 0; %i < %len; %i++) { // Dynamic fields are structured as "key" TAB "value". %field = %this.getDynamicField(%i); %value = getField(%field, 1); %field = getField(%field, 0); // We only care about transition fields, which will have the form // "transition" @ "state" @ "_" @ "event". if(startsWith(%field, "transition")) { // Chop the "transition" off. %field = getSubStr(%field, 10); // Split on tabs! %field = strReplace(%field, _, "\t"); // Now we can easily extract the state and event using fields. 
%state = getField(%field, 0); %event = getField(%field, 1); // Only consider transitions out of our current state. if(%state !$= "" && %event !$= "") { if(%state $= %this.state) { VerbHelpText.addText(%this.helpText[%event], true); } } } } // Special case: you can always cancel. if(%this.state !$= ready) { VerbHelpText.addText("\nctrl c   Cancel\n", true); } // Resize the dialog so it looks nice. VerbHelpDlg.extent = getWord(VerbHelpDlg.extent, 0) SPC 14 * getRecordCount(VerbHelpText.getText()); } function Verbs::toggleHelp(%this) { // If there's a currently-running tweening effect, we need to kill it before // going back in the other direction. if(VerbHelpDlg.tween) { VerbHelpDlg.tween.delete(); } // Toggle deployed state which tells us which direction to tween in. VerbHelpDlg.deployed = !VerbHelpDlg.deployed; // And start the tween! VerbHelpDlg.tween = Tweens.toOnce(200, VerbHelpDlg, VerbHelpDlg.deployed ? "position: 20 252" : "position: -200 252"); } function Verbs::endVerb(%this) { %this.onEvent(finish); } //----------------------------------------------------------------------------- // Event scripts function Verbs::enterReady(%this) { %this.map.push(); } function Verbs::leaveReady(%this) { %this.map.pop(); } function Verbs::enterAttackTarget(%this) { Enemies.beginTarget(); } function Verbs::leaveAttackTarget(%this) { Enemies.endTarget(); } function Verbs::enterAttack(%this) { Knight.getDataBlock().attack(Knight, %this.target); %this.target = ""; %this.endVerb(); } function Verbs::enterCoverTarget(%this) { Cover.beginTarget(); } function Verbs::leaveCoverTarget(%this) { Cover.endTarget(); } function Verbs::enterCover(%this) { Knight.getDataBlock().takeCover(Knight, %this.target); %this.target = ""; %this.endVerb(); } function Verbs::enterStop(%this) { Knight.getDataBlock().stopAll(Knight); %this.endVerb(); } function Verbs::enterSelectDirection(%this) { %this.directionMap.push(); } function Verbs::leaveSelectDirection(%this) { %this.directionMap.pop(); } function Verbs::enterMove(%this) { // Get the movement direction from the letter. Basically, combine the different // cardinal directions in different ways to construct 8 possible movement // vectors. We don't worry about normalising the vector here... switch$(%this.direction) { case w: %dir = TheCamera.getForwardVector(); case a: %dir = VectorScale(TheCamera.getRightVector(), -1); case s: %dir = VectorScale(TheCamera.getForwardVector(), -1); case d: %dir = TheCamera.getRightVector(); case q: %dir = VectorAdd(VectorScale(TheCamera.getRightVector(), -1), TheCamera.getForwardVector()); case e: %dir = VectorAdd(TheCamera.getRightVector(), TheCamera.getForwardVector()); case c: %dir = VectorAdd(TheCamera.getRightVector(), VectorScale(TheCamera.getForwardVector(), -1)); case z: %dir = VectorScale(VectorAdd(TheCamera.getRightVector(), TheCamera.getForwardVector()), -1); } // ...because it's normalised here, at the same time as becoming horizontal. %dir = VectorNormalize(getWords(%dir, 0, 1) SPC 0); // Construct a new position to move to by colliding a ray with the edge of the // arena circle. So glad I decided to go with the circular arena! %pos = rayCircle(Knight.getPosition(), %dir, 50); // Bring the position in from the edge of the circle a bit. %pos = VectorSub(%pos, VectorScale(%dir, 2)); Knight.goTo(%pos, true, 0.5); %this.endVerb(); } function rayCircle(%pos, %ray, %radius) { // Ray/circle intersection: http://stackoverflow.com/a/1549997/945863 // Assume circle is centered at 0, 0, 1.
%pos = getWords(%pos, 0, 1) SPC 1; %Dx = getWord(%pos, 0); %Dy = getWord(%pos, 1); %a = mPow(VectorLen(%ray), 2); %b = 2 * %Dx * getword(%ray, 0) + 2 * %Dy * getWord(%ray, 1); %c = mPow(%Dx, 2) + mPow(%Dy, 2) - mPow(%radius, 2); %sol = mSolveQuadratic(%a, %b, %c); // The positive solution is the one we want. %x = getMax(getWord(%sol, 1), getWord(%sol, 2)); return VectorAdd(%pos, VectorScale(%ray, %x)); }
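// For reference, rayCircle above is the standard 2D ray/circle intersection: with the start position P
// taken relative to the circle centre and ray direction d, solving |P + t*d|^2 = r^2 gives
// a = d.d, b = 2*P.d, c = P.P - r^2, and the larger root t locates the exit point ahead of the knight.
// The snippet below is a hedged C# transcription of that math for illustration only; it is not part of
// the script and uses hypothetical names.
namespace Sketches
{
    using System;

    internal static class RayCircleSketch
    {
        // origin is relative to the circle centre; direction need not be normalised.
        internal static (double X, double Y) Intersect((double X, double Y) origin, (double X, double Y) direction, double radius)
        {
            double a = direction.X * direction.X + direction.Y * direction.Y;
            double b = 2 * (origin.X * direction.X + origin.Y * direction.Y);
            double c = origin.X * origin.X + origin.Y * origin.Y - radius * radius;

            // With the origin inside the circle, c < 0, so the discriminant is positive and the
            // '+' root is the positive solution (the exit point along the ray).
            double t = (-b + Math.Sqrt(b * b - 4 * a * c)) / (2 * a);
            return (origin.X + t * direction.X, origin.Y + t * direction.Y);
        }
    }
}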
// todo: replace sphere geometry approximation with cone using System; using System.Collections.Generic; using Microsoft.Xna.Framework.Graphics; using Myre.Entities; using Myre.Entities.Behaviours; using Myre.Extensions; using Myre.Graphics.Geometry; using Myre.Graphics.Lighting; using Myre.Graphics.Materials; using Ninject; using System.Numerics; using SwizzleMyVectors.Geometry; using Color = Microsoft.Xna.Framework.Color; namespace Myre.Graphics.Deferred.LightManagers { public class DeferredSpotLightManager : BehaviourManager<SpotLight>, IDirectLight { class LightData { public SpotLight Light; public RenderTarget2D ShadowMap; public Matrix4x4 View; public Matrix4x4 Projection; } private readonly Material _geometryLightingMaterial; private readonly Material _quadLightingMaterial; private readonly Quad _quad; private readonly Model _geometry; private readonly View _shadowView; //private BasicEffect basicEffect; //private VertexPositionColor[] debugVertices; //private int[] debugIndices; private readonly List<LightData> _lights; private readonly List<LightData> _touchesNearPlane; private readonly List<LightData> _touchesFarPlane; private readonly List<LightData> _touchesBothPlanes; private readonly List<LightData> _touchesNeitherPlane; private readonly DepthStencilState _depthGreater; private GeometryRenderer _geometryRenderer; public DeferredSpotLightManager(IKernel kernel, GraphicsDevice device) { var effect = Content.Load<Effect>("SpotLight"); _geometryLightingMaterial = new Material(effect.Clone(), "Geometry"); _quadLightingMaterial = new Material(effect.Clone(), "Quad"); //basicEffect = new BasicEffect(device); //debugVertices = new VertexPositionColor[10]; //debugIndices = new int[(debugVertices.Length - 1) * 2 * 2]; //for (int i = 1; i < debugVertices.Length; i++) //{ // debugVertices[i] = new VertexPositionColor( // new Vector3( // (float)Math.Sin(i * (MathHelper.TwoPi / (debugVertices.Length - 1))), // (float)Math.Cos(i * (MathHelper.TwoPi / (debugVertices.Length - 1))), // -1), // Color.White); // var index = (i - 1) * 4; // debugIndices[index] = 0; // debugIndices[index + 1] = i; // debugIndices[index + 2] = i; // debugIndices[index + 3] = (i % (debugVertices.Length - 1)) + 1; //i < debugVertices.Length - 1 ? 
i + 1 : 1; //} _geometry = Content.Load<Model>("sphere"); _quad = new Quad(device); var shadowCameraEntity = kernel.Get<EntityDescription>(); shadowCameraEntity.AddBehaviour<View>(); _shadowView = shadowCameraEntity.Create().GetBehaviour<View>(); _shadowView.Camera = new Camera(); _lights = new List<LightData>(); _touchesNearPlane = new List<LightData>(); _touchesFarPlane = new List<LightData>(); _touchesBothPlanes = new List<LightData>(); _touchesNeitherPlane = new List<LightData>(); _depthGreater = new DepthStencilState() { DepthBufferEnable = true, DepthBufferWriteEnable = false, DepthBufferFunction = CompareFunction.GreaterEqual }; } public override void Initialise(Scene scene) { _geometryRenderer = new GeometryRenderer(scene.FindManagers<IGeometryProvider>()); base.Initialise(scene); } public override void Add(SpotLight behaviour) { var data = new LightData() { Light = behaviour }; _lights.Add(data); base.Add(behaviour); } public override bool Remove(SpotLight behaviour) { bool removed = base.Remove(behaviour); if (removed) { for (int i = 0; i < _lights.Count; i++) { if (_lights[i].Light == behaviour) { _lights.RemoveAt(i); break; } } } return removed; } public void Prepare(Renderer renderer) { _touchesNearPlane.Clear(); _touchesFarPlane.Clear(); _touchesBothPlanes.Clear(); _touchesNeitherPlane.Clear(); var frustum = renderer.Data.GetValue(Names.View.ViewFrustum); float falloffFactor = renderer.Data.GetOrCreate(Names.Lighting.AttenuationScale, 100).Value; _geometryLightingMaterial.Parameters["LightFalloffFactor"].SetValue(falloffFactor); _quadLightingMaterial.Parameters["LightFalloffFactor"].SetValue(falloffFactor); //float threshold = renderer.Data.Get("lighting_threshold", 1f / 100f).Value; //float adaptedLuminance = renderer.Data.Get<float>("adaptedluminance", 1).Value; //threshold = adaptedLuminance * threshold; foreach (var light in _lights) { if (!light.Light.Active) continue; light.Light.Direction = Vector3.Normalize(light.Light.Direction); //var luminance = Math.Max(light.Colour.X, Math.Max(light.Colour.Y, light.Colour.Z)); //light.range = (float)Math.Sqrt(luminance * falloffFactor / threshold); var bounds = new BoundingSphere(light.Light.Position, light.Light.Range); if (!bounds.Intersects(frustum)) continue; var near = bounds.Intersects(frustum.Near) == PlaneIntersectionType.Intersecting; var far = bounds.Intersects(frustum.Far) == PlaneIntersectionType.Intersecting; if (near && far) _touchesBothPlanes.Add(light); else if (near) _touchesNearPlane.Add(light); else if (far) _touchesFarPlane.Add(light); else _touchesNeitherPlane.Add(light); if (light.ShadowMap != null) { RenderTargetManager.RecycleTarget(light.ShadowMap); light.ShadowMap = null; } light.View = Matrix4x4.CreateLookAt( light.Light.Position, light.Light.Position + light.Light.Direction, light.Light.Up); // Vector3.UnitY || light.Light.Direction == -Vector3.UnitY ? 
Vector3.UnitX : Vector3.UnitY); light.Projection = Matrix4x4.CreatePerspectiveFieldOfView(light.Light.Angle, 1, 1, light.Light.Range); if (light.Light.ShadowResolution > 0) DrawShadowMap(renderer, light); } } private void DrawShadowMap(Renderer renderer, LightData data) { var light = data.Light; var target = RenderTargetManager.GetTarget(renderer.Device, light.ShadowResolution, light.ShadowResolution, SurfaceFormat.Single, DepthFormat.Depth24Stencil8, name: "spot light shadow map", usage: RenderTargetUsage.DiscardContents); renderer.Device.SetRenderTarget(target); renderer.Device.Clear(Color.Black); var resolution = renderer.Data.GetOrCreate<Vector2>(Names.View.Resolution); var previousResolution = resolution.Value; resolution.Value = new Vector2(light.ShadowResolution); renderer.Device.DepthStencilState = DepthStencilState.Default; renderer.Device.BlendState = BlendState.Opaque; renderer.Device.RasterizerState = RasterizerState.CullCounterClockwise; var view = renderer.Data.GetOrCreate<View>(Names.View.ActiveView); var previousView = view.Value; view.Value = _shadowView; _shadowView.Camera.View = data.View; _shadowView.Camera.Projection = data.Projection; _shadowView.Camera.NearClip = 1; _shadowView.Camera.FarClip = light.Range; _shadowView.Viewport = new Viewport(0, 0, light.ShadowResolution, light.ShadowResolution); _shadowView.SetMetadata(renderer.Data); _geometryRenderer.Draw("shadows_viewlength", renderer); data.ShadowMap = target; resolution.Value = previousResolution; previousView.SetMetadata(renderer.Data); view.Value = previousView; } public void Draw(Renderer renderer) { var metadata = renderer.Data; var device = renderer.Device; var part = _geometry.Meshes[0].MeshParts[0]; device.SetVertexBuffer(part.VertexBuffer); device.Indices = part.IndexBuffer; DrawGeometryLights(_touchesFarPlane, metadata, device); device.DepthStencilState = _depthGreater; device.RasterizerState = RasterizerState.CullClockwise; DrawGeometryLights(_touchesNearPlane, metadata, device); DrawGeometryLights(_touchesNeitherPlane, metadata, device); //foreach (var light in touchesNeitherPlane) //{ // device.DepthStencilState = stencilWritePass; // device.RasterizerState = RasterizerState.CullNone; // device.BlendState = colourWriteDisable; // device.Clear(ClearOptions.Stencil, Color.Transparent, 0, 0); // SetupLight(metadata, null, light); // DrawGeomery(nothingMaterial, metadata, device); // device.DepthStencilState = stencilCheckPass; // device.RasterizerState = RasterizerState.CullCounterClockwise; // device.BlendState = BlendState.Additive; // DrawGeomery(geometryLightingMaterial, metadata, device); //} device.DepthStencilState = DepthStencilState.None; device.RasterizerState = RasterizerState.CullCounterClockwise; foreach (var light in _touchesBothPlanes) { SetupLight(metadata, _quadLightingMaterial, light); _quad.Draw(_quadLightingMaterial, metadata); } } // ReSharper disable ParameterTypeCanBeEnumerable.Local private void DrawGeometryLights(List<LightData> lights, RendererMetadata metadata, GraphicsDevice device) // ReSharper restore ParameterTypeCanBeEnumerable.Local { foreach (var light in lights) { SetupLight(metadata, _geometryLightingMaterial, light); DrawGeomery(_geometryLightingMaterial, metadata, device); } } private void DrawGeomery(Material material, RendererMetadata metadata, GraphicsDevice device) { var part = _geometry.Meshes[0].MeshParts[0]; foreach (var pass in material.Begin(metadata)) { pass.Apply(); device.DrawIndexedPrimitives(PrimitiveType.TriangleList, part.VertexOffset, 0, 
part.NumVertices, part.StartIndex, part.PrimitiveCount); } } private void SetupLight(RendererMetadata metadata, Material material, LightData data) { var light = data.Light; Matrix4x4 view = metadata.GetValue(new TypedName<Matrix4x4>("view")); if (material != null) { var position = Vector3.Transform(light.Position, view); var direction = Vector3.TransformNormal(light.Direction, view); var up = Vector3.TransformNormal(light.Up, view); float angle = (float)Math.Cos(light.Angle / 2); if (light.Mask != null || light.ShadowResolution > 0) { var inverseView = metadata.GetValue(new TypedName<Matrix4x4>("inverseview")); var cameraToLightProjection = inverseView * data.View * data.Projection; material.Parameters["CameraViewToLightProjection"].SetValue(cameraToLightProjection); } material.Parameters["LightPosition"].SetValue(position); material.Parameters["LightDirection"].SetValue(-direction); material.Parameters["Angle"].SetValue(angle); material.Parameters["Falloff"].SetValue(light.Falloff); material.Parameters["Range"].SetValue(light.Range); material.Parameters["Colour"].SetValue(light.Colour); material.Parameters["EnableProjectiveTexturing"].SetValue(light.Mask != null); material.Parameters["Mask"].SetValue(light.Mask); material.Parameters["EnableShadows"].SetValue(light.ShadowResolution > 0); material.Parameters["ShadowMapSize"].SetValue(new Vector2(light.ShadowResolution, light.ShadowResolution)); material.Parameters["ShadowMap"].SetValue(data.ShadowMap); material.Parameters["LightFarClip"].SetValue(light.Range); var nearPlane = new Plane(light.Direction, Vector3.Dot(light.Direction, light.Position)); nearPlane = Plane.Normalize(nearPlane); nearPlane = Plane.Transform(nearPlane, view); material.Parameters["LightNearPlane"].SetValue(new Vector4(nearPlane.Normal, nearPlane.D)); } var world = Matrix4x4.CreateScale(light.Range / _geometry.Meshes[0].BoundingSphere.Radius) * Matrix4x4.CreateTranslation(light.Position); metadata.Set<Matrix4x4>(Names.Matrix.World, world); var projection = metadata.GetValue(new TypedName<Matrix4x4>("projection")); var worldview = world * view; metadata.Set(new TypedName<Matrix4x4>("worldview"), worldview); metadata.Set(new TypedName<Matrix4x4>("worldviewprojection"), worldview * projection); } } }
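// A hedged reading of the Prepare/Draw pair above (sketch only, with the graphics types abstracted
// away): each visible spot light's bounding sphere is tested against the view frustum's near and far
// planes, and that classification selects the draw path used in Draw(): light-volume geometry when only
// the far plane is crossed, back-face geometry with the GreaterEqual depth state when the near plane
// (or neither plane) is crossed, and a full-screen quad when both planes are crossed. The decision
// itself reduces to the small helper below, which is not part of the manager.
namespace Myre.Graphics.Deferred.LightManagers.Sketches
{
    internal enum SpotLightDrawPath
    {
        GeometryFrontFaces, // sphere crosses only the far plane
        GeometryBackFaces,  // sphere crosses the near plane, or neither plane
        FullScreenQuad      // sphere crosses both planes
    }

    internal static class SpotLightClassificationSketch
    {
        internal static SpotLightDrawPath Classify(bool touchesNearPlane, bool touchesFarPlane)
        {
            if (touchesNearPlane && touchesFarPlane)
            {
                return SpotLightDrawPath.FullScreenQuad;
            }

            return touchesFarPlane
                ? SpotLightDrawPath.GeometryFrontFaces
                : SpotLightDrawPath.GeometryBackFaces;
        }
    }
}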
#region License /* * Copyright 2002-2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #endregion using System; using System.Collections; using System.IO; using System.Reflection; using log4net; using NUnit.Framework; using Spring.Core.IO; using Spring.Objects.Factory.Xml; using Spring.Threading; namespace Spring.Services.WindowsService.Common.Deploy.FileSystem { public class TestingDispatcher : IDeployEventDispatcher { ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); IList events = new ArrayList(); ISync wait = new Semaphore(0); public void PerformDispatch (object sender) { lock (this) { log.Debug(String.Format("performing dispatch: {0} events", events.Count)); foreach (DeployEventArgs eventArgs in events) { if (DeployEvent != null) { log.Debug(String.Format("raising event {0}", eventArgs.EventType)); DeployEvent(sender, eventArgs); } } log.Debug(String.Format("clearing events list", events.Count)); events.Clear(); } } public void Dispatch (IDeployLocation sender, DeployEventType eventType, IApplication application) { lock (this) { log.Debug(String.Format("collecting event {0} (collected so far: {1})", eventType, events.Count)); events.Add(new DeployEventArgs(application, DeployEventType.ApplicationUpdated)); wait.Release(); } } public event DeployEventHandler DeployEvent; public void Dispose () { } public void Wait() { wait.Acquire(); } public void WaitAndPerformDispatch(object o) { Wait(); PerformDispatch(o); } } // problems with dynamic mocks: too many calls public class MockFileSystemEventFilter : IFileSystemEventFilter { public bool called = false; public bool accept; public MockFileSystemEventFilter (bool accept) { this.accept = accept; } public bool Filter (FileSystemEventArgs args) { called = true; return accept; } } [TestFixture] public class FilteringSupportTest { IList allow; IList disallow; FileSystemEventArgs anEvent; FilteringSupport support; FileSystemEventArgs springAssemblyEvent; IApplication application; [SetUp] public void SetUp () { allow = new ArrayList(); disallow = new ArrayList(); application = new Application(Path.GetFullPath("foo")); anEvent = new FileSystemEventArgs(WatcherChangeTypes.All, "foo", "foo.dll"); springAssemblyEvent = new FileSystemEventArgs(WatcherChangeTypes.All, Path.GetFullPath(SpringAssembliesDeployer.PrivateBinPathPrefix + "foo"), "foo.dll"); support = new FilteringSupport(allow, disallow, SpringAssembliesDeployer.DisallowFilter); } [Test] public void AnEventIsAcceptedIfThereIsNoFilterDefined () { Assert.IsTrue(support.Accept(anEvent), "event not accepted with no filter defined"); } [Test] public void EventsRelatedToSpringAssembliesAreNeverAccepted () { Assert.IsFalse(support.Accept(springAssemblyEvent), "spring assemblies event accepted"); allow.Add(new RegularExpressionFilter( String.Format ("**/{0}*/**", SpringAssembliesDeployer.PrivateBinPathPrefix), true)); Assert.IsFalse(support.Accept(springAssemblyEvent), "spring assemblies event accepted"); } [Test] public void 
AnEventIsAcceptedIfAtLeastOneAllowFilterFiltersIt () { allow.Add(new MockFileSystemEventFilter(false)); Assert.IsFalse(support.Accept(anEvent), "allowed when no allow filter allows"); allow.Add(new MockFileSystemEventFilter(true)); Assert.IsTrue(support.Accept(anEvent), "not allowed when one allow filter allows"); allow.Add(new MockFileSystemEventFilter(false)); Assert.IsTrue(support.Accept(anEvent), "not allowed when one allow filter allows"); } [Test] public void AnEventIsNotAcceptedIfAtLeastOneDisallowFilterFiltersIt () { disallow.Add(new MockFileSystemEventFilter(false)); Assert.IsTrue(support.Accept(anEvent), "disallowed when no disallow filter disallows"); disallow.Add(new MockFileSystemEventFilter(true)); Assert.IsFalse(support.Accept(anEvent), "allowed when one disallow filter disallows"); disallow.Add(new MockFileSystemEventFilter(false)); Assert.IsFalse(support.Accept(anEvent), "allowed when one disallow filter disallows"); } [Test] public void ADisallowingFilterDominatesAllowFilters () { disallow.Add(new MockFileSystemEventFilter(true)); allow.Add(new MockFileSystemEventFilter(false)); Assert.IsFalse(support.Accept(anEvent), "event accepted when a disallow filter disallows"); allow.Add(new MockFileSystemEventFilter(true)); Assert.IsFalse(support.Accept(anEvent), "event accepted when a disallow filter disallows"); } [Test] public void WithNoDisallowingFilterItDependsOnThePresenceOfAnAllowingFilters () { disallow.Add(new MockFileSystemEventFilter(false)); allow.Add(new MockFileSystemEventFilter(false)); Assert.IsFalse(support.Accept(anEvent), "event accepted when a disallow filter disallows"); allow.Add(new MockFileSystemEventFilter(true)); Assert.IsTrue(support.Accept(anEvent), "event not accepted when an allow filter allows and no disallow filter disallows"); disallow.Add(new MockFileSystemEventFilter(true)); Assert.IsFalse(support.Accept(anEvent), "event accepted when a disallow filter disallows"); } [Test] public void IgnoreEventsRegardingTheDirectoryOfTheApplication() { FileSystemEventArgs e = new FileSystemEventArgs(WatcherChangeTypes.All, Directory.GetCurrentDirectory(), "foo"); Assert.IsFalse(support.IsApplicationEvent(e, application, true)); e = new FileSystemEventArgs(WatcherChangeTypes.All, Path.GetFullPath("foo"), "foo.dll"); Assert.IsTrue(support.IsApplicationEvent(e, application, true)); e = new FileSystemEventArgs(WatcherChangeTypes.All, Directory.GetCurrentDirectory(), "foo/foo.dll"); Assert.IsTrue(support.IsApplicationEvent(e, application, true)); e = new FileSystemApplicationWatcher(application, new NullWatcherConfigurer()).GenericApplicationEvent; Assert.IsTrue(support.IsApplicationEvent(e, application, true)); } [Test] public void CanDecideIfAnEventIsAnApplicationEventIgnoringCase() { FileSystemEventArgs e = new FileSystemEventArgs(WatcherChangeTypes.All, Directory.GetCurrentDirectory(), "FOO/FOO.DLL"); application = new Application(Path.GetFullPath("foo").ToUpper()); Assert.IsTrue(support.IsApplicationEvent(e, application, true), "failed to match without case"); e = new FileSystemEventArgs(WatcherChangeTypes.All, "C:/", "foo/foo.dll"); Assert.IsFalse(support.IsApplicationEvent(e, application, false), "unexpected match with case"); } } [TestFixture] public class FileSystemApplicationWatcherTest { ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); Latch eventLatch; string appName, anotherAppName; string appFullPath, anotherAppFullPath; private IDeployEventDispatcher dispatcher; private FileSystemApplicationWatcher watcher; private 
IApplication application; private bool dispatched; private ISync controlledEventLatch; [SetUp] public void SetUp () { TestUtils.ConfigureLog4Net(); dispatched = false; appName = Guid.NewGuid().ToString(); appFullPath = Path.GetFullPath(appName); anotherAppName = Guid.NewGuid().ToString(); anotherAppFullPath = Path.GetFullPath(anotherAppName); Directory.CreateDirectory(appFullPath); Directory.CreateDirectory(anotherAppFullPath); eventLatch = new Latch(); controlledEventLatch = new Latch(); dispatcher = new ForwardingDeployEventDispatcher(); application = new Application(appFullPath); dispatcher.DeployEvent += new DeployEventHandler(dispatcher_DeployEvent); watcher = new FileSystemApplicationWatcher(application); } [TearDown] public void TearDown () { if (watcher != null) { watcher.StopWatching(); watcher.Dispose(); } dispatcher.DeployEvent -= new DeployEventHandler(dispatcher_DeployEvent); Directory.Delete(appFullPath, true); Directory.Delete(anotherAppFullPath, true); } [Test] public void ProduceUpdateEventsForWatchedApplication() { TestingDispatcher controlledDispatcher = new TestingDispatcher(); controlledDispatcher.DeployEvent += new DeployEventHandler(controlledDispatcher_DeployEvent); watcher.StartWatching(controlledDispatcher); string subDir = Path.Combine(appFullPath, "foo"); Directory.CreateDirectory(subDir); controlledDispatcher.WaitAndPerformDispatch(null); using (File.Create(Path.Combine (subDir, "foo.bar"))) {} controlledDispatcher.WaitAndPerformDispatch(null); } [Test] public void DoNotProduceUpdateEventsForOtherApplications() { TestingDispatcher controlledDispatcher = new TestingDispatcher(); controlledDispatcher.DeployEvent += new DeployEventHandler(controlledDispatcher_DeployEvent); watcher.StartWatching(controlledDispatcher); string subDir = Path.Combine(anotherAppFullPath, "foo"); Directory.CreateDirectory(subDir); using (File.Create(Path.Combine (subDir, "foo.bar"))) {} dispatched = false; controlledDispatcher.PerformDispatch(null); Assert.IsFalse(dispatched); } [Test] public void UsesEventFiltersToAllowAnEventToBePropagated () { MockFileSystemEventFilter filter = new MockFileSystemEventFilter(true); watcher.StartWatching(dispatcher); watcher.AddAllowFilter (filter); string subDir = Path.Combine(appFullPath, "foo"); Directory.CreateDirectory(subDir); using (File.Create(Path.Combine (appFullPath, "foo.bar"))) {} eventLatch.Acquire(); Assert.IsTrue(filter.called); } [Test] public void CanBeConfiguredToIncludeOrExcludePathsForEvents () { string simple = "Data/Xml/watcher-simple.xml"; XmlObjectFactory f = new XmlObjectFactory(new FileSystemResource(simple)); f.RegisterSingleton(DefaultApplicationWatcherFactory.InjectedApplicationName, application); watcher = f["watcher"] as FileSystemApplicationWatcher; Assert.IsNotNull(watcher, String.Format("test file [{0}] should define a file system resource!", simple)); Assert.AreEqual(1, watcher.Excludes.Count); Assert.AreEqual(1, watcher.Includes.Count); watcher.StartWatching(dispatcher); // propagated string subDir = Path.Combine(appFullPath, "foo"); Directory.CreateDirectory(subDir); using (File.Create(Path.Combine (subDir, "foo.bar"))) {} eventLatch.Acquire(); Assert.IsFalse(dispatched); } [Test] public void ByDefaultTheCaseIsIgnoredButCanBeConfigured () { Assert.IsTrue(watcher.IgnoreCase); watcher.IgnoreCase = false; Assert.IsFalse(watcher.IgnoreCase); } private void dispatcher_DeployEvent(object sender, DeployEventArgs args) { log.Debug("releasing latch"); eventLatch.Release(); log.Debug("latch released"); } private void
controlledDispatcher_DeployEvent(object sender, DeployEventArgs args) { dispatched = true; log.Debug(String.Format("application = {0}", args.Application.FullPath)); log.Debug(String.Format("anotherAppFullPath = {0}", anotherAppFullPath)); controlledEventLatch.Release(); } } }
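// --- Illustrative sketch (not part of the original test suite) ---
// The FilteringSupportTest fixture above pins down the precedence the filters must follow:
// any matching disallow filter rejects the event, otherwise the event is accepted when there
// are no allow filters or when at least one allow filter matches. The helper below restates
// that rule with plain predicates purely as a reading aid; it is not the real FilteringSupport
// code and ignores the spring-assemblies and application-path checks the real class also performs.
using System;
using System.Collections.Generic;
using System.Linq;

internal static class FilterPrecedenceSketch
{
    internal static bool Accept(IList<Func<bool>> allow, IList<Func<bool>> disallow)
    {
        // A single disallowing filter dominates every allow filter.
        if (disallow.Any(f => f()))
            return false;

        // With no allow filters defined, everything not disallowed is accepted.
        if (allow.Count == 0)
            return true;

        // Otherwise at least one allow filter has to match.
        return allow.Any(f => f());
    }
}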
/* * CTSDecrypt.cs - Implementation of the * "System.Security.Cryptography.CTSDecrypt" class. * * Copyright (C) 2002 Southern Storm Software, Pty Ltd. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ namespace System.Security.Cryptography { #if CONFIG_CRYPTO using System; using Platform; // Decryption process for "Cipher Text Stealing" mode (CTS). internal sealed class CTSDecrypt { // Initialize a "CryptoAPITransform" object for CTS decryption. public static void Initialize(CryptoAPITransform transform) { transform.tempBuffer = new byte [transform.blockSize * 3]; transform.tempSize = 0; } // Transform an input block into an output block. public static int TransformBlock(CryptoAPITransform transform, byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset) { int blockSize = transform.blockSize; byte[] iv = transform.iv; IntPtr state = transform.state; int offset = outputOffset; byte[] tempBuffer = transform.tempBuffer; int tempSize = transform.tempSize; int index; // Process all of the data in the input. We need to keep // the last two blocks for the finalization process. while(inputCount >= blockSize) { // If the temporary buffer is full, then flush a block // through the cipher in CBC mode. if(tempSize > blockSize) { // Decrypt the ciphertext block and XOR with the IV. CryptoMethods.Decrypt(state, tempBuffer, blockSize, tempBuffer, 0); for(index = blockSize - 1; index >= 0; --index) { tempBuffer[index] ^= iv[index]; } // Copy the original ciphertext to the IV. Array.Copy(tempBuffer, blockSize, iv, 0, blockSize); // Copy the plaintext into place. Array.Copy(tempBuffer, 0, outputBuffer, offset, blockSize); // Advance to the next output block. offset += blockSize; // Shift the second block down to the first position. Array.Copy(tempBuffer, blockSize * 2, tempBuffer, blockSize, blockSize); tempSize -= blockSize; } // Copy the next block into the temporary buffer. Array.Copy(inputBuffer, inputOffset, tempBuffer, tempSize + blockSize, blockSize); inputOffset += blockSize; inputCount -= blockSize; tempSize += blockSize; } transform.tempSize = tempSize; // Finished. return offset - outputOffset; } // Transform the final input block. public static byte[] TransformFinalBlock(CryptoAPITransform transform, byte[] inputBuffer, int inputOffset, int inputCount) { int blockSize = transform.blockSize; byte[] iv = transform.iv; IntPtr state = transform.state; int offset; byte[] tempBuffer = transform.tempBuffer; byte[] outputBuffer; int tempSize; int index; // Allocate the output buffer. outputBuffer = new byte [inputCount + transform.tempSize]; // Process as many full blocks as possible. index = inputCount - (inputCount % blockSize); offset = TransformBlock(transform, inputBuffer, inputOffset, index, outputBuffer, 0); inputOffset += index; inputCount -= index; // Flush the first block if we need the extra space. 
tempSize = transform.tempSize; if(tempSize > blockSize && inputCount > 0) { // Decrypt the ciphertext block and XOR with the IV. CryptoMethods.Decrypt(state, tempBuffer, blockSize, tempBuffer, 0); for(index = blockSize - 1; index >= 0; --index) { tempBuffer[index] ^= iv[index]; } // Copy the original ciphertext to the IV. Array.Copy(tempBuffer, blockSize, iv, 0, blockSize); // Copy the plaintext into place. Array.Copy(tempBuffer, 0, outputBuffer, offset, blockSize); // Advance to the next output block. offset += blockSize; // Shift the second block down to the first position. Array.Copy(tempBuffer, blockSize * 2, tempBuffer, blockSize, blockSize); tempSize -= blockSize; } // Copy the remainder of the data into the temporary buffer. Array.Copy(inputBuffer, inputOffset, tempBuffer, tempSize + blockSize, inputCount); tempSize += inputCount; // "Applied Cryptography" describes Cipher Text Stealing // as taking two blocks to generate the short end-point. // If we less than one block, then use CFB instead. if(tempSize < blockSize) { // Decrypt the single block in CFB mode. CryptoMethods.Encrypt(transform.state2, iv, 0, iv, 0); for(index = 0; index < tempSize; ++index) { outputBuffer[offset + index] = (byte)(iv[index] ^ tempBuffer[index + blockSize]); } } else { // Decrypt the second last ciphertext block. CryptoMethods.Decrypt(state, tempBuffer, blockSize, tempBuffer, blockSize); // Rebuild the ciphertext for the last block. for(index = inputCount; index < blockSize; ++index) { tempBuffer[blockSize * 2 + index] = tempBuffer[blockSize + index]; } // Get the last plaintext block from the second // last ciphertext block. for(index = inputCount - 1; index >= 0; --index) { outputBuffer[offset + blockSize + index] = (byte)(tempBuffer[blockSize + index] ^ tempBuffer[blockSize * 2 + index]); } // Decrypt the last ciphertext block that we rebuilt. CryptoMethods.Decrypt(state, tempBuffer, blockSize * 2, tempBuffer, 0); // XOR the block with the IV to get the second // last plaintext block. for(index = blockSize - 1; index >= 0; --index) { outputBuffer[offset + index] = (byte)(iv[index] ^ tempBuffer[index]); } } // Finished. return outputBuffer; } }; // class CTSDecrypt #endif // CONFIG_CRYPTO }; // namespace System.Security.Cryptography
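// --- Illustrative sketch (not part of the original class) ---
// TransformFinalBlock above sizes its output as inputCount + tempSize, and only the final
// (possibly short) block is handled with ciphertext stealing: the missing tail bytes of the
// last ciphertext block are "stolen" from the decrypted second-last block before that rebuilt
// block is decrypted again. The arithmetic below only walks through that bookkeeping for an
// assumed 16-byte block size and a 5-byte final fragment; no real cipher is involved.
using System;

internal static class CtsBookkeepingSketch
{
    internal static void Demo()
    {
        int blockSize = 16;   // assumed cipher block size
        int inputCount = 37;  // assumed length of the final input chunk

        // Full blocks are pushed through the normal CBC path first.
        int fullBlockBytes = inputCount - (inputCount % blockSize); // 32
        int tailBytes = inputCount - fullBlockBytes;                // 5 bytes left for stealing

        // The short final block borrows (blockSize - tailBytes) bytes from the
        // decrypted second-last block to rebuild a complete ciphertext block.
        int stolenBytes = blockSize - tailBytes;                    // 11

        Console.WriteLine("full-block bytes: {0}, tail: {1}, stolen: {2}",
            fullBlockBytes, tailBytes, stolenBytes);
    }
}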
/************************************************************************************ Filename : OSPManager.cs Content : Interface into the Oculus Spatializer Plugin Created : Novemnber 4, 2014 Authors : Peter Giokaris Copyright : Copyright 2014 Oculus VR, Inc. All Rights reserved. Licensed under the Oculus VR Rift SDK License Version 3.1 (the "License"); you may not use the Oculus VR Rift SDK except in compliance with the License, which is provided at the time of installation or download, or which otherwise accompanies this software in either electronic or hard copy form. You may obtain a copy of the License at http://www.oculusvr.com/licenses/LICENSE-3.1 Unless required by applicable law or agreed to in writing, the Oculus VR SDK distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ************************************************************************************/ // Add Minor releases as they come on-line #if UNITY_5_0 || UNITY_5_1 || UNITY_5_2 #define UNITY5 #elif UNITY_6_0 #error support this! #endif using UnityEngine; using System; using System.Runtime.InteropServices; using System.Collections.Generic; //------------------------------------------------------------------------------------- // ***** OSPManager // /// <summary> /// OSPManager interfaces into the Oculus Spatializer. This component should be added /// into the scene once. /// /// </summary> public class OSPManager : MonoBehaviour { public const string strOSP = "OculusSpatializerPlugin"; // * * * * * * * * * * * * * // RoomModel - Used to enable and define simple box room for early reflections [StructLayout(LayoutKind.Sequential)] public struct RoomModel { public bool Enable; public int ReflectionOrder; public float DimensionX; public float DimensionY; public float DimensionZ; public float Reflection_K0; public float Reflection_K1; public float Reflection_K2; public float Reflection_K3; public float Reflection_K4; public float Reflection_K5; public bool ReverbOn; } // * * * * * * * * * * * * * // Import functions [DllImport(strOSP)] private static extern bool OSP_Init(int sample_rate, int buffer_size); [DllImport(strOSP)] private static extern bool OSP_Exit(); [DllImport(strOSP)] private static extern bool OSP_UpdateRoomModel(ref RoomModel rm); [DllImport(strOSP)] private static extern void OSP_SetReflectionsRangeMax(float range); [DllImport(strOSP)] private static extern int OSP_AquireContext(); [DllImport(strOSP)] private static extern void OSP_ReturnContext(int context); [DllImport(strOSP)] private static extern bool OSP_GetBypass(); [DllImport(strOSP)] private static extern void OSP_SetBypass(bool bypass); [DllImport(strOSP)] private static extern bool OSP_GetUseSimple(); [DllImport(strOSP)] private static extern void OSP_SetUseSimple(bool useSimple); [DllImport(strOSP)] private static extern void OSP_SetGlobalScale(float globalScale); [DllImport(strOSP)] private static extern bool OSP_GetUseInverseSquareAttenuation(); [DllImport(strOSP)] private static extern void OSP_SetUseInverseSquareAttenuation(bool useInvSq); [DllImport(strOSP)] private static extern void OSP_SetFalloffRangeGlobal(float nearRange, float farRange); [DllImport(strOSP)] private static extern void OSP_SetFalloffRangeLocal(int contextAndSound, float nearRange, float farRange); [DllImport(strOSP)] private static extern void OSP_SetGain(float gain); [DllImport(strOSP)] 
private static extern void OSP_SetFrequencyHint(int context, int hint); [DllImport(strOSP)] private static extern void OSP_SetDisableReflectionsOnSound(int contextAndSound, bool disable); [DllImport(strOSP)] private static extern float OSP_GetDrainTime(int context); [DllImport(strOSP)] private static extern void OSP_SetPositonRelXYZ(int context, float x, float y, float z); [DllImport(strOSP)] private static extern void OSP_Spatialize(int context, float[] ioBuf, bool useInvSq, float near, float far); // * * * * * * * * * * * * * // Public members private int bufferSize = 512; // Do not expose at this time public int BufferSize { get{return bufferSize; } set{bufferSize = value;} } private int sampleRate = 48000; // Do not expose at this time public int SampleRate { get{return sampleRate; } set{sampleRate = value;} } [SerializeField] private bool bypass = false; public bool Bypass { get{return OSP_GetBypass(); } set{bypass = value; OSP_SetBypass(bypass);} } [SerializeField] private bool useSimple = false; public bool UseSimple { get{ return useSimple; } set{useSimple = value; OSP_SetUseSimple(useSimple);} } [SerializeField] private float globalScale = 1.0f; public float GlobalScale { get{return globalScale; } set { globalScale = Mathf.Clamp (value, 0.00001f, 10000.0f); OSP_SetGlobalScale(globalScale); } } [SerializeField] private float gain = 0.0f; public float Gain { get{return gain; } set { gain = Mathf.Clamp(value, -24.0f, 24.0f); OSP_SetGain(gain); } } [SerializeField] private bool useInverseSquare = false; public bool UseInverseSquare { get{return useInverseSquare;} set { useInverseSquare = value; OSP_SetUseInverseSquareAttenuation(useInverseSquare); } } [SerializeField] private float falloffNear = 10.0f; public float FalloffNear { get{return falloffNear; } set { falloffNear = Mathf.Clamp(value, 0.0f, 1000000.0f); OSP_SetFalloffRangeGlobal(falloffNear, falloffFar); } } [SerializeField] private float falloffFar = 1000.0f; public float FalloffFar { get{return falloffFar; } set { falloffFar = Mathf.Clamp(value, 0.0f, 1000000.0f); OSP_SetFalloffRangeGlobal(falloffNear, falloffFar); } } // Access the values without calling through properties (and wrecking local state) public void GetNearFarFalloffValues (ref float n, ref float f) { n = falloffNear; f = falloffFar; } //---------------------- // Reflection parameters private bool dirtyReflection; [SerializeField] private bool enableReflections = false; public bool EnableReflections { get{return enableReflections; } set{enableReflections = value; dirtyReflection = true;} } [SerializeField] private bool enableReverb = false; public bool EnableReverb { get{return enableReverb; } set{enableReverb = value; dirtyReflection = true;} } [SerializeField] private Vector3 dimensions = new Vector3 (0.0f, 0.0f, 0.0f); public Vector3 Dimensions { get{return dimensions; } set{dimensions = value; dimensions.x = Mathf.Clamp (dimensions.x, 1.0f, 200.0f); dimensions.y = Mathf.Clamp (dimensions.y, 1.0f, 200.0f); dimensions.z = Mathf.Clamp (dimensions.z, 1.0f, 200.0f); dirtyReflection = true;} } [SerializeField] private Vector2 rK01 = new Vector2(0.0f, 0.0f); public Vector2 RK01 { get{return rK01; } set{rK01 = value; rK01.x = Mathf.Clamp (rK01.x, 0.0f, 0.97f); rK01.y = Mathf.Clamp (rK01.y, 0.0f, 0.97f); dirtyReflection = true;} } [SerializeField] private Vector2 rK23 = new Vector2(0.0f, 0.0f); public Vector2 RK23 { get{return rK23; } set{rK23 = value; rK23.x = Mathf.Clamp (rK23.x, 0.0f, 0.95f); rK23.y = Mathf.Clamp (rK23.y, 0.0f, 0.95f); dirtyReflection = true;} } 
[SerializeField] private Vector2 rK45 = new Vector2(0.0f, 0.0f); public Vector2 RK45 { get{return rK45; } set{rK45 = value; rK45.x = Mathf.Clamp (rK45.x, 0.0f, 0.95f); rK45.y = Mathf.Clamp (rK45.y, 0.0f, 0.95f); dirtyReflection = true;} } // * * * * * * * * * * * * * // Public members // * * * * * * * * * * * * * // Private members //private List<OSPAudioSource> audioSources; // * * * * * * * * * * * * * // Static members // Our instance to allow this script to be called without a direct connection. private static bool sOSPInit = false; // Some functions in OSPAudioSource require probing into OSPManager, so they can // interface through this static member. public static OSPManager sInstance = null; // * * * * * * * * * * * * * // MonoBehaviour overrides /// <summary> /// Awake this instance. /// </summary> void Awake () { // We can only have one instance of OSPManager in a scene (use this for local property query) if(sInstance == null) { sInstance = this; } else { Debug.LogWarning (System.String.Format ("OSPManager-Awake: Only one instance of OSPManager can exist in the scene.")); return; } int samplerate; int bufsize; int numbuf; #if (!UNITY5) // Used to override samplerate and buffer size with optimal values bool setvalues = true; // OSX is picky with samplerate and buffer sizes, so leave it alone #if (UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX) setvalues = false; #endif #endif // Get the current sample rate samplerate = AudioSettings.outputSampleRate; // Get the current buffer size and number of buffers AudioSettings.GetDSPBufferSize (out bufsize, out numbuf); Debug.LogWarning (System.String.Format ("OSP: Queried SampleRate: {0:F0} BufferSize: {1:F0}", samplerate, bufsize)); // We only know if OpenSL has been enabled if sample rate is 48K. // We need to check another setting. #if (UNITY_ANDROID && !UNITY_EDITOR) if((samplerate == 48000)) { Debug.LogWarning("OSP: Android OpenSL ENABLED (based on 48KHz sample-rate)"); #if (!UNITY5) setvalues = false; #endif } else { Debug.LogWarning("OSP: Android OpenSL DISABLED"); } #endif // We will only set values IF we are not Unity 5 (the ability to set DSP settings does not exist) // NOTE: Unity 4 does not switch DSP buffer sizes using ProjectSettings->Audio, but Unity 5 does. // At some point along Unity 5 maturity, the functionality to set DSP values directly might be removed. #if (!UNITY5) if(setvalues == true) { // NOTE: When setting DSP values in Unity 4, there may be a situation where using PlayOnAwake on // non-spatitalized audio objects will fail to play. // Uncomment this code for achieving the best possibly latency with spatialized audio, but // USE AT YOUR OWN RISK! 
/* // Set the ideal sample rate AudioSettings.outputSampleRate = SampleRate; // Get the sample rate again (it may not take, depending on platform) samplerate = AudioSettings.outputSampleRate; // Set the ideal buffer size AudioSettings.SetDSPBufferSize (BufferSize, numbuf); // Get the current buffer size and number of buffers again AudioSettings.GetDSPBufferSize (out bufsize, out numbuf); */ } #endif Debug.LogWarning (System.String.Format ("OSP: sample rate: {0:F0}", samplerate)); Debug.LogWarning (System.String.Format ("OSP: buffer size: {0:F0}", bufsize)); Debug.LogWarning (System.String.Format ("OSP: num buffers: {0:F0}", numbuf)); sOSPInit = OSP_Init(samplerate, bufsize); // Set global variables not set to dirty updates OSP_SetBypass (bypass); OSP_SetUseSimple (useSimple); OSP_SetGlobalScale (globalScale); OSP_SetGain (gain); OSP_SetFalloffRangeGlobal (falloffNear, falloffFar); // Update reflections for the first time dirtyReflection = true; } /// <summary> /// Start this instance. /// Note: make sure to always have a Start function for classes that have editor scripts. /// </summary> void Start() { } /// <summary> /// Run processes that need to be updated in our game thread /// </summary> void Update() { // Update reflections if (dirtyReflection == true) { UpdateEarlyReflections(); dirtyReflection = false; } } /// <summary> /// Raises the destroy event. /// </summary> void OnDestroy() { sOSPInit = false; // PGG Do not call, faster (but stuck with initial buffer resolution) //OSP_Exit(); } // * * * * * * * * * * * * * // Public Functions /// <summary> /// IsInitialized - Check to see if the system has been initialized /// </summary> /// <returns><c>true</c> if is initialized; otherwise, <c>false</c>.</returns> public static bool IsInitialized() { return sOSPInit; } /// <summary> /// Gets a spatializer context for a sound. /// </summary> /// <returns>The context.</returns> public static int AquireContext () { return OSP_AquireContext(); } /// <summary> /// Releases the context for a sound. /// </summary> /// <param name="context">Context.</param> public static void ReleaseContext(int context) { // Drop back into OSP OSP_ReturnContext (context); } /// <summary> /// Gets the bypass. Used by OSPAudioSource (cannot be written to; used for /// getting global bypass state). /// </summary> /// <returns><c>true</c>, if bypass was gotten, <c>false</c> otherwise.</returns> public static bool GetBypass() { return OSP_GetBypass (); } /// <summary> /// Gets the simple (fast spatialization) override. /// </summary> /// <returns><c>true</c> if the simple override is enabled, <c>false</c> otherwise.</returns> public static bool GetUseSimple() { return OSP_GetUseSimple(); } /// <summary> /// Sets the simple (fast spatialization) override. /// </summary> /// <param name="use">If set to <c>true</c>, use simple spatialization.</param> public static void SetUseSimple(bool use) { OSP_SetUseSimple(use); } /// <summary> /// Gets a value indicating whether this <see cref="OSPManager"/> uses inverse square attenuation. /// </summary> /// <returns><c>true</c> if inverse square attenuation is in use; otherwise, <c>false</c>.</returns> public static bool GetUseInverseSquareAttenuation() { return OSP_GetUseInverseSquareAttenuation(); } /// <summary> /// Sets the frequency hint.
/// </summary> /// <param name="context">Context.</param> /// <param name="hint">Hint.</param> public static void SetFrequencyHint(int context, int hint) { OSP_SetFrequencyHint (context, hint); } /// <summary> /// Sets the disable reflections on sound. /// </summary> /// <param name="context">Context.</param> /// <param name="enable">If set to <c>true</c> enable.</param> public static void SetDisableReflectionsOnSound(int context, bool disable) { OSP_SetDisableReflectionsOnSound(context, disable); } /// <summary> /// Gets the drain time, based on reflection room size. /// </summary> /// <returns>The drain time.</returns> public static float GetDrainTime(int context) { return OSP_GetDrainTime (context); } /// <summary> /// Sets the position of the sound relative to the listener. /// </summary> /// <param name="context">Context.</param> /// <param name="x">The x coordinate.</param> /// <param name="y">The y coordinate.</param> /// <param name="z">The z coordinate.</param> public static void SetPositionRel(int context, float x, float y, float z) { if (sOSPInit == false) return; OSP_SetPositonRelXYZ (context, x, y, z); } /// <summary> /// Spatialize the specified ioBuf using context. /// </summary> /// <param name="ioBuf">Io buffer.</param> /// <param name="context">Context.</param> public static void Spatialize (int context, float[] ioBuf, bool useInvSq, float near, float far) { if (sOSPInit == false) return; OSP_Spatialize (context, ioBuf, useInvSq, near, far); } /// <summary> /// Sets the falloff range local. /// </summary> /// <param name="contextAndSound">Context and sound.</param> /// <param name="nearRange">Near range.</param> /// <param name="farRange">Far range.</param> public static void SetFalloffRangeLocal(int contextAndSound, float nearRange, float farRange) { OSP_SetFalloffRangeLocal(contextAndSound, nearRange, farRange); } /// <summary> /// Adds the active audio source. /// </summary> /// <param name="source">Source.</param> public void AddActiveAudioSource(OSPAudioSource source) { // audioSources.Add(source); } /// <summary> /// Removes the active audio source. /// </summary> /// <param name="source">Source.</param> public void RemoveActiveAudioSource(OSPAudioSource source) { // audioSources.Remove(source); } // * * * * * * * * * * * * * // Private Functions /// <summary> /// Updates the early reflections. /// </summary> void UpdateEarlyReflections() { RoomModel rm; rm.Enable = enableReflections; rm.ReverbOn = enableReverb; rm.ReflectionOrder = 0; // Unused rm.DimensionX = dimensions.x; rm.DimensionY = dimensions.y; rm.DimensionZ = dimensions.z; rm.Reflection_K0 = rK01.x; rm.Reflection_K1 = rK01.y; rm.Reflection_K2 = rK23.x; rm.Reflection_K3 = rK23.y; rm.Reflection_K4 = rK45.x; rm.Reflection_K5 = rK45.y; OSP_UpdateRoomModel (ref rm); } }
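// --- Illustrative sketch (not part of the original plugin) ---
// The shipped OSPAudioSource component is the real consumer of OSPManager; the small
// MonoBehaviour below only sketches the calling pattern exposed by the static API above
// (acquire a context, feed it a listener-relative position, release it) so the life cycle
// is visible in one place. The position fed in here is a stand-in; a real source would
// derive it from the AudioListener transform before handing samples to OSPManager.Spatialize.
using UnityEngine;

public class OSPContextSketch : MonoBehaviour
{
    private int context = -1;

    void OnEnable()
    {
        // Only grab a spatializer context once OSPManager.Awake has initialized the plugin.
        if (OSPManager.IsInitialized())
            context = OSPManager.AquireContext();
    }

    void Update()
    {
        if (context < 0)
            return;

        // Hypothetical listener-relative position for demonstration purposes.
        Vector3 rel = transform.localPosition;
        OSPManager.SetPositionRel(context, rel.x, rel.y, rel.z);
    }

    void OnDisable()
    {
        if (context >= 0)
        {
            OSPManager.ReleaseContext(context);
            context = -1;
        }
    }
}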
using System; using System.Collections; using WinForms = System.Windows.Forms; using Drawing = System.Drawing; using NBM.Plugin; // 4/6/03 namespace NBM { /// <summary> /// Describes a tree node protocol for the contact tree view. /// </summary> public class ProtocolTreeNode : TreeNodeEx, IProtocolListener { private Protocol protocol; private OfflineTreeNode offlineTreeNode; private Hashtable friendsTable = new Hashtable(); private int protocolIndex = 0; /// <summary> /// Associated protocol /// </summary> public Protocol Protocol { get { return protocol; } } /// <summary> /// Constructs a ProtocolTreeNode /// </summary> /// <param name="protocol"></param> /// <param name="offlineTreeNode"></param> /// <param name="protocolIndex">Each protocol is assigned a number, for image indexing purposes. This is that number.</param> public ProtocolTreeNode(Protocol protocol, OfflineTreeNode offlineTreeNode, int protocolIndex) : base(protocol.Name) { this.protocol = protocol; this.offlineTreeNode = offlineTreeNode; this.protocolIndex = protocolIndex; this.protocol.AddListener(this); SetProtocolNodeImageIndex(protocol.Settings.Status); } /// <summary> /// Calculates the image index appropriate for the friend's status icon. /// </summary> /// <param name="status"></param> /// <returns></returns> private int GetFriendNodeImageIndex(OnlineStatus status) { switch (status) { default: case OnlineStatus.Online: return 0; case OnlineStatus.Idle: case OnlineStatus.Away: return 1; case OnlineStatus.Busy: return 2; case OnlineStatus.AppearOffline: case OnlineStatus.Offline: return 3 + ((protocolIndex+1) * 9); } } /// <summary> /// Sets the protocol image index for the user status. /// </summary> /// <param name="status"></param> private void SetProtocolNodeImageIndex(OnlineStatus status) { int imageIndex = 3 + (protocolIndex * 9); switch (status) { default: case OnlineStatus.Online: imageIndex += 1; break; case OnlineStatus.Idle: case OnlineStatus.Away: imageIndex += 2; break; case OnlineStatus.Busy: imageIndex += 3; break; case OnlineStatus.AppearOffline: case OnlineStatus.Offline: imageIndex += 4; break; } this.ExpandedImageIndex = imageIndex; this.CollapsedImageIndex = imageIndex + 4; } /// <summary> /// Sets the tooltip for the apporpriate node. 
/// </summary> /// <param name="node"></param> /// <param name="displayName"></param> /// <param name="status"></param> /// <param name="email"></param> private void SetFriendToolTip(TreeNodeEx node, string displayName, OnlineStatus status, string email) { // change tool tip text node.ToolTipText = "Name: " + displayName + "\nStatus: " + status.ToString() + "\nEmail: " + email; } #region ProtocolListener events #region Invoke methods private delegate void FriendTreeHandler(TreeNodeEx parent, TreeNodeEx child); private static void AddFriendToTree(TreeNodeEx parent, TreeNodeEx child) { parent.Nodes.Add(child); } private static void RemoveFriendFromTree(TreeNodeEx parent, TreeNodeEx child) { parent.Nodes.Remove(child); } #endregion /// <summary> /// /// </summary> /// <param name="friend"></param> public void OnFriendAdd(Friend friend) { FriendTreeNode node = new FriendTreeNode(this.protocol, friend); node.ContextMenu = new FriendMenu(this.protocol, friend); node.CollapsedImageIndex = node.ExpandedImageIndex = this.GetFriendNodeImageIndex(friend.Status); SetFriendToolTip(node, friend.DisplayName, friend.Status, friend.EmailAddress); if (friend.Status == OnlineStatus.Offline) offlineTreeNode.Nodes.Add(node); else this.Nodes.Add(node); this.friendsTable.Add(friend, node); } /// <summary> /// /// </summary> /// <param name="friend"></param> public void OnFriendRemove(Friend friend) { FriendTreeNode node = (FriendTreeNode)this.friendsTable[friend]; node.Remove(); this.friendsTable.Remove(friend); } /// <summary> /// /// </summary> /// <param name="friend"></param> /// <param name="newStatus"></param> public void OnFriendChangeStatus(Friend friend, OnlineStatus newStatus) { FriendTreeNode node = (FriendTreeNode)this.friendsTable[friend]; if (node != null) { // friend went offline, transfer him to the offline list if (friend.Status != OnlineStatus.Offline && newStatus == OnlineStatus.Offline) { // move treenode to offline tree this.TreeView.Invoke(new FriendTreeHandler(RemoveFriendFromTree), new object[] { this, node }); this.TreeView.Invoke(new FriendTreeHandler(AddFriendToTree), new object[] { this.offlineTreeNode, node }); } // else friend has just came online from being offline else if (friend.Status == OnlineStatus.Offline && newStatus != OnlineStatus.Offline) { // move treenode from offline tree to protocol tree this.TreeView.Invoke(new FriendTreeHandler(RemoveFriendFromTree), new object[] { this.offlineTreeNode, node }); this.TreeView.Invoke(new FriendTreeHandler(AddFriendToTree), new object[] { this, node }); } node.CollapsedImageIndex = node.ExpandedImageIndex = GetFriendNodeImageIndex(newStatus); SetFriendToolTip(node, friend.DisplayName, newStatus, friend.EmailAddress); } } /// <summary> /// /// </summary> /// <param name="friend"></param> /// <param name="newName"></param> public void OnFriendChangeDisplayName(Friend friend, string newName) { FriendTreeNode node = (FriendTreeNode)this.friendsTable[friend]; node.Text = newName; SetFriendToolTip(node, newName, friend.Status, friend.EmailAddress); } /// <summary> /// /// </summary> public void OnBeginConnect() { this.FontStyle = Drawing.FontStyle.Italic; } /// <summary> /// /// </summary> public void OnConnect() { this.FontStyle = Drawing.FontStyle.Bold; } /// <summary> /// Called when the connection is canceled /// </summary> public void OnConnectCanceled() { this.FontStyle = Drawing.FontStyle.Regular; } /// <summary> /// /// </summary> /// <param name="forced"></param> public void OnDisconnect(bool forced) { this.FontStyle = 
Drawing.FontStyle.Regular; this.friendsTable.Clear(); this.Nodes.Clear(); for (int i=0; i<this.offlineTreeNode.Nodes.Count; ++i) { FriendTreeNode node = (FriendTreeNode)this.offlineTreeNode.Nodes[i]; if (node.Protocol.Name == this.protocol.Name) { node.Remove(); --i; } } } /// <summary> /// /// </summary> /// <param name="friend"></param> /// <param name="opCompleteEvent"></param> /// <param name="tag"></param> public void OnInvitedToConversation(Friend friend, OperationCompleteEvent opCompleteEvent, object tag) { } /// <summary> /// /// </summary> /// <param name="status"></param> public void OnChangeStatus(OnlineStatus status) { SetProtocolNodeImageIndex(status); } /// <summary> /// /// </summary> /// <param name="username"></param> public void OnAddFriendToList(string username) { } /// <summary> /// /// </summary> /// <param name="friend"></param> public void OnRemoveFriendFromList(Friend friend) { } /// <summary> /// /// </summary> /// <param name="friend"></param> public void OnBlockFriend(Friend friend) { } /// <summary> /// /// </summary> /// <param name="friend"></param> public void OnUnblockFriend(Friend friend) { } /// <summary> /// /// </summary> /// <param name="text"></param> public void OnWriteDebug(string text) { } /// <summary> /// /// </summary> /// <param name="friend"></param> /// <param name="reason"></param> /// <param name="enableAddCheckbox"></param> public void OnPromptForStrangerHasAddedMe(Friend friend, string reason, bool enableAddCheckbox) { } #endregion } }
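// --- Illustrative sketch (not part of the original NBM source) ---
// The two index methods above share one image list: protocol status icons start at
// 3 + protocolIndex * 9 (expanded icons at +1..+4 by status, collapsed icons a further +4),
// while an offline friend's icon index is 3 + (protocolIndex + 1) * 9. The helper below
// repeats that arithmetic for a couple of protocol indices purely so the layout is easy to verify.
using System;

internal static class ImageIndexSketch
{
    internal static void Demo()
    {
        for (int protocolIndex = 0; protocolIndex < 2; protocolIndex++)
        {
            int baseIndex = 3 + (protocolIndex * 9);            // "online" protocol icon lives at baseIndex + 1
            int collapsedOffset = 4;                            // collapsed icon sits 4 slots after the expanded one
            int offlineFriendIndex = 3 + ((protocolIndex + 1) * 9);

            Console.WriteLine("protocol {0}: expanded online={1}, collapsed online={2}, offline friend={3}",
                protocolIndex, baseIndex + 1, baseIndex + 1 + collapsedOffset, offlineFriendIndex);
        }
    }
}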
using Lucene.Net.Support; using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; using System.Text; namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// <para>This class implements a <see cref="MergePolicy"/> that tries /// to merge segments into levels of exponentially /// increasing size, where each level has fewer segments than /// the value of the merge factor. Whenever extra segments /// (beyond the merge factor upper bound) are encountered, /// all segments within the level are merged. You can get or /// set the merge factor using <see cref="MergeFactor"/>.</para> /// /// <para>This class is abstract and requires a subclass to /// define the <see cref="MergePolicy.Size(SegmentCommitInfo)"/> method which specifies how a /// segment's size is determined. <see cref="LogDocMergePolicy"/> /// is one subclass that measures size by document count in /// the segment. <see cref="LogByteSizeMergePolicy"/> is another /// subclass that measures size as the total byte size of the /// file(s) for the segment.</para> /// </summary> public abstract class LogMergePolicy : MergePolicy { /// <summary> /// Defines the allowed range of log(size) for each /// level. A level is computed by taking the max segment /// log size, minus LEVEL_LOG_SPAN, and finding all /// segments falling within that range. /// </summary> public static readonly double LEVEL_LOG_SPAN = 0.75; /// <summary> /// Default merge factor, which is how many segments are /// merged at a time /// </summary> public static readonly int DEFAULT_MERGE_FACTOR = 10; /// <summary> /// Default maximum segment size. A segment of this size /// or larger will never be merged. </summary> /// <seealso cref="MaxMergeDocs"/> public static readonly int DEFAULT_MAX_MERGE_DOCS = int.MaxValue; /// <summary> /// Default noCFSRatio. If a merge's size is >= 10% of /// the index, then we disable compound file for it. </summary> /// <seealso cref="MergePolicy.NoCFSRatio"/> public new static readonly double DEFAULT_NO_CFS_RATIO = 0.1; /// <summary> /// How many segments to merge at a time. </summary> protected int m_mergeFactor = DEFAULT_MERGE_FACTOR; /// <summary> /// Any segments whose size is smaller than this value /// will be rounded up to this value. This ensures that /// tiny segments are aggressively merged. /// </summary> protected long m_minMergeSize; /// <summary> /// If the size of a segment exceeds this value then it /// will never be merged. /// </summary> protected long m_maxMergeSize; // Although the core MPs set it explicitly, we must default in case someone // out there wrote his own LMP ... /// <summary> /// If the size of a segment exceeds this value then it /// will never be merged during <see cref="IndexWriter.ForceMerge(int)"/>. 
/// </summary> protected long m_maxMergeSizeForForcedMerge = long.MaxValue; /// <summary> /// If a segment has more than this many documents then it /// will never be merged. /// </summary> protected int m_maxMergeDocs = DEFAULT_MAX_MERGE_DOCS; /// <summary> /// If true, we pro-rate a segment's size by the /// percentage of non-deleted documents. /// </summary> protected bool m_calibrateSizeByDeletes = true; /// <summary> /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// </summary> public LogMergePolicy() : base(DEFAULT_NO_CFS_RATIO, MergePolicy.DEFAULT_MAX_CFS_SEGMENT_SIZE) { } /// <summary> /// Returns true if <see cref="LogMergePolicy"/> is enabled in <see cref="IndexWriter.infoStream"/>. /// </summary> protected virtual bool IsVerbose { get { IndexWriter w = m_writer.Get(); return w != null && w.infoStream.IsEnabled("LMP"); } } /// <summary> /// Print a debug message to <see cref="IndexWriter.infoStream"/>. /// </summary> protected virtual void Message(string message) { if (IsVerbose) { m_writer.Get().infoStream.Message("LMP", message); } } /// <summary> /// Gets or Sets the number of segments that are merged at /// once and also controls the total number of segments /// allowed to accumulate in the index. /// <para/> /// This determines how often segment indices are merged by /// <see cref="IndexWriter.AddDocument(IEnumerable{IIndexableField})"/>. With smaller values, less RAM is used /// while indexing, and searches are /// faster, but indexing speed is slower. With larger /// values, more RAM is used during indexing, and while /// searches is slower, indexing is /// faster. Thus larger values (> 10) are best for batch /// index creation, and smaller values (&lt; 10) for indices /// that are interactively maintained. /// </summary> public virtual int MergeFactor { get { return m_mergeFactor; } set { if (value < 2) { throw new System.ArgumentException("mergeFactor cannot be less than 2"); } this.m_mergeFactor = value; } } /// <summary> /// Gets or Sets whether the segment size should be calibrated by /// the number of deletes when choosing segments for merge. /// </summary> public virtual bool CalibrateSizeByDeletes { set { this.m_calibrateSizeByDeletes = value; } get { return m_calibrateSizeByDeletes; } } protected override void Dispose(bool disposing) { } /// <summary> /// Return the number of documents in the provided /// <see cref="SegmentCommitInfo"/>, pro-rated by percentage of /// non-deleted documents if /// <see cref="CalibrateSizeByDeletes"/> is set. /// </summary> protected virtual long SizeDocs(SegmentCommitInfo info) { if (m_calibrateSizeByDeletes) { int delCount = m_writer.Get().NumDeletedDocs(info); Debug.Assert(delCount <= info.Info.DocCount); return (info.Info.DocCount - (long)delCount); } else { return info.Info.DocCount; } } /// <summary> /// Return the byte size of the provided /// <see cref="SegmentCommitInfo"/>, pro-rated by percentage of /// non-deleted documents if /// <see cref="CalibrateSizeByDeletes"/> is set. /// </summary> protected virtual long SizeBytes(SegmentCommitInfo info) { if (m_calibrateSizeByDeletes) { return base.Size(info); } return info.GetSizeInBytes(); } /// <summary> /// Returns <c>true</c> if the number of segments eligible for /// merging is less than or equal to the specified /// <paramref name="maxNumSegments"/>. 
/// </summary> protected virtual bool IsMerged(SegmentInfos infos, int maxNumSegments, IDictionary<SegmentCommitInfo, bool?> segmentsToMerge) { int numSegments = infos.Count; int numToMerge = 0; SegmentCommitInfo mergeInfo = null; bool segmentIsOriginal = false; for (int i = 0; i < numSegments && numToMerge <= maxNumSegments; i++) { SegmentCommitInfo info = infos.Info(i); bool? isOriginal; segmentsToMerge.TryGetValue(info, out isOriginal); if (isOriginal != null) { segmentIsOriginal = isOriginal.Value; numToMerge++; mergeInfo = info; } } return numToMerge <= maxNumSegments && (numToMerge != 1 || !segmentIsOriginal || IsMerged(infos, mergeInfo)); } /// <summary> /// Returns the merges necessary to merge the index, taking the max merge /// size or max merge docs into consideration. this method attempts to respect /// the <paramref name="maxNumSegments"/> parameter, however it might be, due to size /// constraints, that more than that number of segments will remain in the /// index. Also, this method does not guarantee that exactly /// <paramref name="maxNumSegments"/> will remain, but &lt;= that number. /// </summary> private MergeSpecification FindForcedMergesSizeLimit(SegmentInfos infos, int maxNumSegments, int last) { MergeSpecification spec = new MergeSpecification(); IList<SegmentCommitInfo> segments = infos.AsList(); int start = last - 1; while (start >= 0) { SegmentCommitInfo info = infos.Info(start); if (Size(info) > m_maxMergeSizeForForcedMerge || SizeDocs(info) > m_maxMergeDocs) { if (IsVerbose) { Message("findForcedMergesSizeLimit: skip segment=" + info + ": size is > maxMergeSize (" + m_maxMergeSizeForForcedMerge + ") or sizeDocs is > maxMergeDocs (" + m_maxMergeDocs + ")"); } // need to skip that segment + add a merge for the 'right' segments, // unless there is only 1 which is merged. if (last - start - 1 > 1 || (start != last - 1 && !IsMerged(infos, infos.Info(start + 1)))) { // there is more than 1 segment to the right of // this one, or a mergeable single segment. spec.Add(new OneMerge(segments.SubList(start + 1, last))); } last = start; } else if (last - start == m_mergeFactor) { // mergeFactor eligible segments were found, add them as a merge. spec.Add(new OneMerge(segments.SubList(start, last))); last = start; } --start; } // Add any left-over segments, unless there is just 1 // already fully merged if (last > 0 && (++start + 1 < last || !IsMerged(infos, infos.Info(start)))) { spec.Add(new OneMerge(segments.SubList(start, last))); } return spec.Merges.Count == 0 ? null : spec; } /// <summary> /// Returns the merges necessary to <see cref="IndexWriter.ForceMerge(int)"/> the index. this method constraints /// the returned merges only by the <paramref name="maxNumSegments"/> parameter, and /// guaranteed that exactly that number of segments will remain in the index. 
/// </summary> private MergeSpecification FindForcedMergesMaxNumSegments(SegmentInfos infos, int maxNumSegments, int last) { var spec = new MergeSpecification(); var segments = infos.AsList(); // First, enroll all "full" merges (size // mergeFactor) to potentially be run concurrently: while (last - maxNumSegments + 1 >= m_mergeFactor) { spec.Add(new OneMerge(segments.SubList(last - m_mergeFactor, last))); last -= m_mergeFactor; } // Only if there are no full merges pending do we // add a final partial (< mergeFactor segments) merge: if (0 == spec.Merges.Count) { if (maxNumSegments == 1) { // Since we must merge down to 1 segment, the // choice is simple: if (last > 1 || !IsMerged(infos, infos.Info(0))) { spec.Add(new OneMerge(segments.SubList(0, last))); } } else if (last > maxNumSegments) { // Take care to pick a partial merge that is // least cost, but does not make the index too // lopsided. If we always just picked the // partial tail then we could produce a highly // lopsided index over time: // We must merge this many segments to leave // maxNumSegments in the index (from when // forceMerge was first kicked off): int finalMergeSize = last - maxNumSegments + 1; // Consider all possible starting points: long bestSize = 0; int bestStart = 0; for (int i = 0; i < last - finalMergeSize + 1; i++) { long sumSize = 0; for (int j = 0; j < finalMergeSize; j++) { sumSize += Size(infos.Info(j + i)); } if (i == 0 || (sumSize < 2 * Size(infos.Info(i - 1)) && sumSize < bestSize)) { bestStart = i; bestSize = sumSize; } } spec.Add(new OneMerge(segments.SubList(bestStart, bestStart + finalMergeSize))); } } return spec.Merges.Count == 0 ? null : spec; } // LUCENENET TODO: Get rid of the nullable in IDictionary<SegmentCommitInfo, bool?>, if possible /// <summary> /// Returns the merges necessary to merge the index down /// to a specified number of segments. /// this respects the <see cref="m_maxMergeSizeForForcedMerge"/> setting. /// By default, and assuming <c>maxNumSegments=1</c>, only /// one segment will be left in the index, where that segment /// has no deletions pending nor separate norms, and it is in /// compound file format if the current useCompoundFile /// setting is <c>true</c>. This method returns multiple merges /// (mergeFactor at a time) so the <see cref="MergeScheduler"/> /// in use may make use of concurrency. /// </summary> public override MergeSpecification FindForcedMerges(SegmentInfos infos, int maxNumSegments, IDictionary<SegmentCommitInfo, bool?> segmentsToMerge) { Debug.Assert(maxNumSegments > 0); if (IsVerbose) { Message("findForcedMerges: maxNumSegs=" + maxNumSegments + " segsToMerge=" + Arrays.ToString(segmentsToMerge)); } // If the segments are already merged (e.g. there's only 1 segment), or // there are <maxNumSegments:. 
if (IsMerged(infos, maxNumSegments, segmentsToMerge)) { if (IsVerbose) { Message("already merged; skip"); } return null; } // Find the newest (rightmost) segment that needs to // be merged (other segments may have been flushed // since merging started): int last = infos.Count; while (last > 0) { SegmentCommitInfo info = infos.Info(--last); if (segmentsToMerge.ContainsKey(info)) { last++; break; } } if (last == 0) { if (IsVerbose) { Message("last == 0; skip"); } return null; } // There is only one segment already, and it is merged if (maxNumSegments == 1 && last == 1 && IsMerged(infos, infos.Info(0))) { if (IsVerbose) { Message("already 1 seg; skip"); } return null; } // Check if there are any segments above the threshold bool anyTooLarge = false; for (int i = 0; i < last; i++) { SegmentCommitInfo info = infos.Info(i); if (Size(info) > m_maxMergeSizeForForcedMerge || SizeDocs(info) > m_maxMergeDocs) { anyTooLarge = true; break; } } if (anyTooLarge) { return FindForcedMergesSizeLimit(infos, maxNumSegments, last); } else { return FindForcedMergesMaxNumSegments(infos, maxNumSegments, last); } } /// <summary> /// Finds merges necessary to force-merge all deletes from the /// index. We simply merge adjacent segments that have /// deletes, up to mergeFactor at a time. /// </summary> public override MergeSpecification FindForcedDeletesMerges(SegmentInfos segmentInfos) { var segments = segmentInfos.AsList(); int numSegments = segments.Count; if (IsVerbose) { Message("findForcedDeleteMerges: " + numSegments + " segments"); } var spec = new MergeSpecification(); int firstSegmentWithDeletions = -1; IndexWriter w = m_writer.Get(); Debug.Assert(w != null); for (int i = 0; i < numSegments; i++) { SegmentCommitInfo info = segmentInfos.Info(i); int delCount = w.NumDeletedDocs(info); if (delCount > 0) { if (IsVerbose) { Message(" segment " + info.Info.Name + " has deletions"); } if (firstSegmentWithDeletions == -1) { firstSegmentWithDeletions = i; } else if (i - firstSegmentWithDeletions == m_mergeFactor) { // We've seen mergeFactor segments in a row with // deletions, so force a merge now: if (IsVerbose) { Message(" add merge " + firstSegmentWithDeletions + " to " + (i - 1) + " inclusive"); } spec.Add(new OneMerge(segments.SubList(firstSegmentWithDeletions, i))); firstSegmentWithDeletions = i; } } else if (firstSegmentWithDeletions != -1) { // End of a sequence of segments with deletions, so, // merge those past segments even if it's fewer than // mergeFactor segments if (IsVerbose) { Message(" add merge " + firstSegmentWithDeletions + " to " + (i - 1) + " inclusive"); } spec.Add(new OneMerge(segments.SubList(firstSegmentWithDeletions, i))); firstSegmentWithDeletions = -1; } } if (firstSegmentWithDeletions != -1) { if (IsVerbose) { Message(" add merge " + firstSegmentWithDeletions + " to " + (numSegments - 1) + " inclusive"); } spec.Add(new OneMerge(segments.SubList(firstSegmentWithDeletions, numSegments))); } return spec; } private class SegmentInfoAndLevel : IComparable<SegmentInfoAndLevel> { internal readonly SegmentCommitInfo info; internal readonly float level; private int index; public SegmentInfoAndLevel(SegmentCommitInfo info, float level, int index) { this.info = info; this.level = level; this.index = index; } // Sorts largest to smallest public virtual int CompareTo(SegmentInfoAndLevel other) { return other.level.CompareTo(level); } } /// <summary> /// Checks if any merges are now necessary and returns a /// <see cref="MergePolicy.MergeSpecification"/> if so. 
A merge /// is necessary when there are more than /// <see cref="MergeFactor"/> segments at a given level. When /// multiple levels have too many segments, this method /// will return multiple merges, allowing the /// <see cref="MergeScheduler"/> to use concurrency. /// </summary> public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, SegmentInfos infos) { int numSegments = infos.Count; if (IsVerbose) { Message("findMerges: " + numSegments + " segments"); } // Compute levels, which is just log (base mergeFactor) // of the size of each segment IList<SegmentInfoAndLevel> levels = new List<SegmentInfoAndLevel>(); var norm = (float)Math.Log(m_mergeFactor); ICollection<SegmentCommitInfo> mergingSegments = m_writer.Get().MergingSegments; for (int i = 0; i < numSegments; i++) { SegmentCommitInfo info = infos.Info(i); long size = Size(info); // Floor tiny segments if (size < 1) { size = 1; } SegmentInfoAndLevel infoLevel = new SegmentInfoAndLevel(info, (float)Math.Log(size) / norm, i); levels.Add(infoLevel); if (IsVerbose) { long segBytes = SizeBytes(info); string extra = mergingSegments.Contains(info) ? " [merging]" : ""; if (size >= m_maxMergeSize) { extra += " [skip: too large]"; } Message("seg=" + m_writer.Get().SegString(info) + " level=" + infoLevel.level + " size=" + String.Format(CultureInfo.InvariantCulture, "{0:0.00} MB", segBytes / 1024 / 1024.0) + extra); } } float levelFloor; if (m_minMergeSize <= 0) { levelFloor = (float)0.0; } else { levelFloor = (float)(Math.Log(m_minMergeSize) / norm); } // Now, we quantize the log values into levels. The // first level is any segment whose log size is within // LEVEL_LOG_SPAN of the max size, or, who has such as // segment "to the right". Then, we find the max of all // other segments and use that to define the next level // segment, etc. MergeSpecification spec = null; int numMergeableSegments = levels.Count; int start = 0; while (start < numMergeableSegments) { // Find max level of all segments not already // quantized. 
float maxLevel = levels[start].level; for (int i = 1 + start; i < numMergeableSegments; i++) { float level = levels[i].level; if (level > maxLevel) { maxLevel = level; } } // Now search backwards for the rightmost segment that // falls into this level: float levelBottom; if (maxLevel <= levelFloor) { // All remaining segments fall into the min level levelBottom = -1.0F; } else { levelBottom = (float)(maxLevel - LEVEL_LOG_SPAN); // Force a boundary at the level floor if (levelBottom < levelFloor && maxLevel >= levelFloor) { levelBottom = levelFloor; } } int upto = numMergeableSegments - 1; while (upto >= start) { if (levels[upto].level >= levelBottom) { break; } upto--; } if (IsVerbose) { Message(" level " + levelBottom.ToString("0.0") + " to " + maxLevel.ToString("0.0") + ": " + (1 + upto - start) + " segments"); } // Finally, record all merges that are viable at this level: int end = start + m_mergeFactor; while (end <= 1 + upto) { bool anyTooLarge = false; bool anyMerging = false; for (int i = start; i < end; i++) { SegmentCommitInfo info = levels[i].info; anyTooLarge |= (Size(info) >= m_maxMergeSize || SizeDocs(info) >= m_maxMergeDocs); if (mergingSegments.Contains(info)) { anyMerging = true; break; } } if (anyMerging) { // skip } else if (!anyTooLarge) { if (spec == null) { spec = new MergeSpecification(); } IList<SegmentCommitInfo> mergeInfos = new List<SegmentCommitInfo>(); for (int i = start; i < end; i++) { mergeInfos.Add(levels[i].info); Debug.Assert(infos.Contains(levels[i].info)); } if (IsVerbose) { Message(" add merge=" + m_writer.Get().SegString(mergeInfos) + " start=" + start + " end=" + end); } spec.Add(new OneMerge(mergeInfos)); } else if (IsVerbose) { Message(" " + start + " to " + end + ": contains segment over maxMergeSize or maxMergeDocs; skipping"); } start = end; end = start + m_mergeFactor; } start = 1 + upto; } return spec; } /// <summary> /// <para>Determines the largest segment (measured by /// document count) that may be merged with other segments. /// Small values (e.g., less than 10,000) are best for /// interactive indexing, as this limits the length of /// pauses while indexing to a few seconds. Larger values /// are best for batched indexing and speedier /// searches.</para> /// /// <para>The default value is <see cref="int.MaxValue"/>.</para> /// /// <para>The default merge policy /// (<see cref="LogByteSizeMergePolicy"/>) also allows you to set this /// limit by net size (in MB) of the segment, using /// <see cref="LogByteSizeMergePolicy.MaxMergeMB"/>.</para> /// </summary> public virtual int MaxMergeDocs { set { this.m_maxMergeDocs = value; } get { return m_maxMergeDocs; } } public override string ToString() { StringBuilder sb = new StringBuilder("[" + this.GetType().Name + ": "); sb.Append("minMergeSize=").Append(m_minMergeSize).Append(", "); sb.Append("mergeFactor=").Append(m_mergeFactor).Append(", "); sb.Append("maxMergeSize=").Append(m_maxMergeSize).Append(", "); sb.Append("maxMergeSizeForForcedMerge=").Append(m_maxMergeSizeForForcedMerge).Append(", "); sb.Append("calibrateSizeByDeletes=").Append(m_calibrateSizeByDeletes).Append(", "); sb.Append("maxMergeDocs=").Append(m_maxMergeDocs).Append(", "); sb.Append("maxCFSSegmentSizeMB=").Append(MaxCFSSegmentSizeMB).Append(", "); sb.Append("noCFSRatio=").Append(m_noCFSRatio); sb.Append("]"); return sb.ToString(); } } }
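// --- Illustrative sketch (not part of the original Lucene.NET source) ---
// LogMergePolicy is abstract; the doc comments above name LogDocMergePolicy and
// LogByteSizeMergePolicy as the concrete subclasses. The snippet below only exercises the
// knobs defined in this file (MergeFactor, MaxMergeDocs, CalibrateSizeByDeletes) on a
// LogDocMergePolicy instance; wiring the policy into an IndexWriterConfig is deliberately
// left out to keep the example grounded in the members shown here.
using Lucene.Net.Index;

internal static class LogMergePolicySketch
{
    internal static LogMergePolicy BuildBatchIndexingPolicy()
    {
        var policy = new LogDocMergePolicy
        {
            // Larger merge factors favour batch-indexing throughput over search-time segment count.
            MergeFactor = 20,

            // Segments above this document count are never merged (DEFAULT_MAX_MERGE_DOCS otherwise).
            MaxMergeDocs = 500000,

            // Pro-rate segment sizes by live documents so delete-heavy segments merge sooner.
            CalibrateSizeByDeletes = true
        };
        return policy;
    }
}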
// // BEncodedList.cs // // Authors: // Alan McGovern [email protected] // // Copyright (C) 2006 Alan McGovern // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections.Generic; using System.Text; using System.IO; namespace TorrentHardLinkHelper.BEncoding { /// <summary> /// Class representing a BEncoded list /// </summary> public class BEncodedList : BEncodedValue, IList<BEncodedValue> { #region Member Variables private List<BEncodedValue> list; #endregion #region Constructors /// <summary> /// Create a new BEncoded List with default capacity /// </summary> public BEncodedList() : this(new List<BEncodedValue>()) { } /// <summary> /// Create a new BEncoded List with the supplied capacity /// </summary> /// <param name="capacity">The initial capacity</param> public BEncodedList(int capacity) : this(new List<BEncodedValue>(capacity)) { } public BEncodedList(IEnumerable<BEncodedValue> list) { if (list == null) throw new ArgumentNullException("list"); this.list = new List<BEncodedValue>(list); } private BEncodedList(List<BEncodedValue> value) { this.list = value; } #endregion #region Encode/Decode Methods /// <summary> /// Encodes the list to a byte[] /// </summary> /// <param name="buffer">The buffer to encode the list to</param> /// <param name="offset">The offset to start writing the data at</param> /// <returns></returns> public override int Encode(byte[] buffer, int offset) { int written = 0; buffer[offset] = (byte)'l'; written++; for (int i = 0; i < this.list.Count; i++) written += this.list[i].Encode(buffer, offset + written); buffer[offset + written] = (byte)'e'; written++; return written; } /// <summary> /// Decodes a BEncodedList from the given StreamReader /// </summary> /// <param name="reader"></param> internal override void DecodeInternal(RawReader reader) { if (reader.ReadByte() != 'l') // Remove the leading 'l' throw new BEncodingException("Invalid data found. Aborting"); while ((reader.PeekByte() != -1) && (reader.PeekByte() != 'e')) list.Add(BEncodedValue.Decode(reader)); if (reader.ReadByte() != 'e') // Remove the trailing 'e' throw new BEncodingException("Invalid data found. 
Aborting"); } #endregion #region Helper Methods /// <summary> /// Returns the size of the list in bytes /// </summary> /// <returns></returns> public override int LengthInBytes() { int length = 0; length += 1; // Lists start with 'l' for (int i=0; i < this.list.Count; i++) length += this.list[i].LengthInBytes(); length += 1; // Lists end with 'e' return length; } #endregion #region Overridden Methods public override bool Equals(object obj) { BEncodedList other = obj as BEncodedList; if (other == null) return false; for (int i = 0; i < this.list.Count; i++) if (!this.list[i].Equals(other.list[i])) return false; return true; } public override int GetHashCode() { int result = 0; for (int i = 0; i < list.Count; i++) result ^= list[i].GetHashCode(); return result; } public override string ToString() { return System.Text.Encoding.UTF8.GetString(Encode()); } #endregion #region IList methods public void Add(BEncodedValue item) { this.list.Add(item); } public void AddRange (IEnumerable<BEncodedValue> collection) { list.AddRange (collection); } public void Clear() { this.list.Clear(); } public bool Contains(BEncodedValue item) { return this.list.Contains(item); } public void CopyTo(BEncodedValue[] array, int arrayIndex) { this.list.CopyTo(array, arrayIndex); } public int Count { get { return this.list.Count; } } public int IndexOf(BEncodedValue item) { return this.list.IndexOf(item); } public void Insert(int index, BEncodedValue item) { this.list.Insert(index, item); } public bool IsReadOnly { get { return false; } } public bool Remove(BEncodedValue item) { return this.list.Remove(item); } public void RemoveAt(int index) { this.list.RemoveAt(index); } public BEncodedValue this[int index] { get { return this.list[index]; } set { this.list[index] = value; } } public IEnumerator<BEncodedValue> GetEnumerator() { return this.list.GetEnumerator(); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return this.GetEnumerator(); } #endregion } }
/* Copyright 2012 Michael Edwards Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ //-CRE- using System; using System.Collections.Generic; using System.Linq; using Glass.Mapper.Configuration; using Glass.Mapper.Pipelines.ConfigurationResolver.Tasks.OnDemandResolver; using Glass.Mapper.Pipelines.DataMapperResolver; using System.Collections.Concurrent; using Castle.Core.Logging; using Glass.Mapper.IoC; namespace Glass.Mapper { /// <summary> /// The context contains the configuration of Glass.Mapper /// </summary> public class Context { /// <summary> /// The default context name /// </summary> public const string DefaultContextName = "Default"; #region STATICS /// <summary> /// The default Context. Used by services if no Context is specified. /// </summary> /// <value>The default.</value> public static Context Default { get; private set; } /// <summary> /// Contains the list of Contexts currently loaded. /// </summary> /// <value>The contexts.</value> public static IDictionary<string, Context> Contexts { get; private set; } /// <summary> /// Initializes static members of the <see cref="Context"/> class. /// </summary> static Context() { Contexts = new Dictionary<string, Context>(); } /// <summary> /// Creates a Context and creates it as the default Context. This is assigned to the Default static property. /// </summary> /// <param name="resolver">The resolver.</param> /// <returns>Context.</returns> public static Context Create(IDependencyResolver resolver) { return Context.Create(resolver, DefaultContextName, true); } /// <summary> /// Creates a new context and adds it to the Contexts dictionary. /// </summary> /// <param name="resolver">The resolver.</param> /// <param name="contextName">The context name, used as the key in the Contexts dictionary.</param> /// <param name="isDefault">Indicates if this is the default context. 
If it is the context is assigned to the Default static property.</param> /// <returns>Context.</returns> /// <exception cref="System.NullReferenceException">No dependency resolver set.</exception> public static Context Create(IDependencyResolver resolver, string contextName, bool isDefault = false) { if (resolver == null) throw new NullReferenceException("No dependency resolver set."); var context = new Context(); context.DependencyResolver = resolver; context.Name = contextName; Contexts[contextName] = context; if (isDefault) Default = context; return context; } /// <summary> /// Clears all static and default contexts /// </summary> public static void Clear() { Default = null; Contexts = new Dictionary<string, Context>(); } #endregion public string Name { get; private set; } /// <summary> /// List of the type configurations loaded by this context /// </summary> /// <value>The type configurations.</value> public ConcurrentDictionary<Type, AbstractTypeConfiguration> TypeConfigurations { get; private set; } /// <summary> /// The dependency resolver used by services using the context /// </summary> /// <value>The dependency resolver.</value> public IDependencyResolver DependencyResolver { get; set; } public ILogger Log { get; set; } /// <summary> /// Prevents a default instance of the <see cref="Context"/> class from being created. /// </summary> private Context() { TypeConfigurations = new ConcurrentDictionary<Type, AbstractTypeConfiguration>(); Log = new NullLogger(); } /// <summary> /// Gets a type configuration based on type /// </summary> /// <param name="type">The type.</param> /// <returns>AbstractTypeConfiguration.</returns> public AbstractTypeConfiguration this[Type type] { get { if (TypeConfigurations.ContainsKey(type)) return TypeConfigurations[type]; else return null; } } /// <summary> /// Loads the specified loaders. /// </summary> /// <param name="loaders">The list of configuration loaders to load into the context.</param> public void Load(params IConfigurationLoader[] loaders) { if (loaders.Any()) { var typeConfigurations = loaders .Select(loader => loader.Load()).Aggregate((x, y) => x.Union(y)); //first we have to add each type config to the collection foreach (var typeConfig in typeConfigurations) { //don't load generic types //see https://github.com/mikeedwards83/Glass.Mapper/issues/85 if (typeConfig.Type.IsGenericTypeDefinition) { continue; } if (TypeConfigurations.ContainsKey(typeConfig.Type)){ Log.Warn("Tried to add type {0} to TypeConfigurationDictioary twice".Formatted(typeConfig.Type)); continue; } typeConfig.PerformAutoMap(); ProcessProperties(typeConfig.Properties); if (!TypeConfigurations.TryAdd(typeConfig.Type, typeConfig)) { Log.Warn("Failed to add type {0} to TypeConfigurationDictionary".Formatted(typeConfig.Type)); } } } } /// <summary> /// Processes the properties. 
/// </summary> /// <param name="properties">The properties.</param> /// <exception cref="System.NullReferenceException">Could not find data mapper for property {0} on type {1} /// .Formatted(property.PropertyInfo.Name,property.PropertyInfo.ReflectedType.FullName)</exception> private void ProcessProperties(IEnumerable<AbstractPropertyConfiguration> properties ) { DataMapperResolver runner = new DataMapperResolver(DependencyResolver.DataMapperResolverFactory.GetItems()); foreach(var property in properties.Where(x=>x.Mapper == null)) { DataMapperResolverArgs args = new DataMapperResolverArgs(this, property); args.PropertyConfiguration = property; args.DataMappers = DependencyResolver.DataMapperFactory.GetItems(); runner.Run(args); if(args.Result == null) { throw new NullReferenceException( "Could not find data mapper for property {0} on type {1}" .Formatted(property.PropertyInfo.Name,property.PropertyInfo.ReflectedType.FullName)); } property.Mapper = args.Result; } } /// <summary> /// Gets the type configuration. /// </summary> /// <param name="obj">The obj.</param> /// <returns>AbstractTypeConfiguration.</returns> public T GetTypeConfiguration<T>(object obj, bool doNotLoad = false, bool checkBase = true) where T : AbstractTypeConfiguration, new() { return GetTypeConfigurationFromType<T>(obj.GetType(), doNotLoad, checkBase); } /// <summary> /// Gets the type configuration. /// </summary> /// <param name="obj">The obj.</param> /// <returns>AbstractTypeConfiguration.</returns> public T GetTypeConfigurationFromType<T>(Type type, bool doNotLoad = false, bool checkBase = true) where T : AbstractTypeConfiguration, new() { var config = TypeConfigurations.ContainsKey(type) ? TypeConfigurations[type] : null; if (config != null) return config as T; if (checkBase && type.BaseType != null) { //check base type encase of proxy config = TypeConfigurations.ContainsKey(type.BaseType) ? TypeConfigurations[type.BaseType] : null; } if (config != null) return config as T; //check interfaces encase this is an interface proxy string name = type.Name; //ME - I added the OrderByDescending in response to issue 53 // raised on the Glass.Sitecore.Mapper project. Longest name should be compared first // to get the most specific interface var interfaceType = type.GetInterfaces() .OrderByDescending(x => x.Name.Length) .FirstOrDefault(x => name.Contains(x.Name)); if (interfaceType != null) config = TypeConfigurations.ContainsKey(interfaceType) ? TypeConfigurations[interfaceType] : null; if (config == null && !doNotLoad) { Load(new OnDemandLoader<T>(type)); return GetTypeConfigurationFromType<T>(type, true); } return config as T; } } }
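// ---------------------------------------------------------------------------
// Hedged usage sketch of the public surface defined above: Context.Create,
// Load and the type-configuration indexer. The "resolver" and "loader"
// parameters stand in for concrete IDependencyResolver / IConfigurationLoader
// implementations supplied by a Glass.Mapper integration package elsewhere;
// they are assumptions, not part of this file.
using System;
using Glass.Mapper;
using Glass.Mapper.Configuration;
using Glass.Mapper.IoC;

static class GlassContextSketch
{
    static void Configure(IDependencyResolver resolver, IConfigurationLoader loader)
    {
        // Creates the context under the default name and assigns Context.Default.
        Context context = Context.Create(resolver);

        // Each loader contributes type configurations; generic type definitions
        // and duplicates are skipped, and a data mapper is resolved per property.
        context.Load(loader);

        // Look up whatever configuration was loaded for a given mapped type.
        AbstractTypeConfiguration config = context[typeof(string)];
        Console.WriteLine(config == null ? "no configuration loaded for this type"
                                         : "configuration found");
    }
}
// ---------------------------------------------------------------------------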
using System; using System.Drawing; using System.Windows.Forms; using System.ComponentModel; using System.ComponentModel.Design; using System.Runtime.InteropServices; namespace xDockPanel { partial class DockPanel { // This class comes from Jacob Slusser's MdiClientController class: // http://www.codeproject.com/cs/miscctrl/mdiclientcontroller.asp private class MdiClientController : NativeWindow, IComponent, IDisposable { private bool m_autoScroll = true; private BorderStyle m_borderStyle = BorderStyle.Fixed3D; private MdiClient m_mdiClient = null; private Form m_parentForm = null; private ISite m_site = null; public MdiClientController() { } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { if (disposing) { if (Site != null && Site.Container != null) Site.Container.Remove(this); if (Disposed != null) Disposed(this, EventArgs.Empty); } } public bool AutoScroll { get { return m_autoScroll; } set { // By default the MdiClient control scrolls. It can appear though that // there are no scrollbars by turning them off when the non-client // area is calculated. I decided to expose this method following // the .NET vernacular of an AutoScroll property. m_autoScroll = value; if (MdiClient != null) UpdateStyles(); } } public BorderStyle BorderStyle { set { // Error-check the enum. if (!Enum.IsDefined(typeof(BorderStyle), value)) throw new InvalidEnumArgumentException(); m_borderStyle = value; if (MdiClient == null) return; // This property can actually be visible in design-mode, // but to keep it consistent with the others, // prevent this from being show at design-time. if (Site != null && Site.DesignMode) return; // There is no BorderStyle property exposed by the MdiClient class, // but this can be controlled by Win32 functions. A Win32 ExStyle // of WS_EX_CLIENTEDGE is equivalent to a Fixed3D border and a // Style of WS_BORDER is equivalent to a FixedSingle border. // This code is inspired Jason Dori's article: // "Adding designable borders to user controls". // http://www.codeproject.com/cs/miscctrl/CsAddingBorders.asp // Get styles using Win32 calls int style = NativeMethods.GetWindowLong(MdiClient.Handle, (int)Win32.GetWindowLongIndex.GWL_STYLE); int exStyle = NativeMethods.GetWindowLong(MdiClient.Handle, (int)Win32.GetWindowLongIndex.GWL_EXSTYLE); // Add or remove style flags as necessary. switch (m_borderStyle) { case BorderStyle.Fixed3D: exStyle |= (int)Win32.WindowExStyles.WS_EX_CLIENTEDGE; style &= ~((int)Win32.WindowStyles.WS_BORDER); break; case BorderStyle.FixedSingle: exStyle &= ~((int)Win32.WindowExStyles.WS_EX_CLIENTEDGE); style |= (int)Win32.WindowStyles.WS_BORDER; break; case BorderStyle.None: style &= ~((int)Win32.WindowStyles.WS_BORDER); exStyle &= ~((int)Win32.WindowExStyles.WS_EX_CLIENTEDGE); break; } // Set the styles using Win32 calls NativeMethods.SetWindowLong(MdiClient.Handle, (int)Win32.GetWindowLongIndex.GWL_STYLE, style); NativeMethods.SetWindowLong(MdiClient.Handle, (int)Win32.GetWindowLongIndex.GWL_EXSTYLE, exStyle); // Cause an update of the non-client area. UpdateStyles(); } } public MdiClient MdiClient { get { return m_mdiClient; } } [Browsable(false)] [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)] public Form ParentForm { get { return m_parentForm; } set { // If the ParentForm has previously been set, // unwire events connected to the old parent. 
if (m_parentForm != null) { m_parentForm.HandleCreated -= new EventHandler(ParentFormHandleCreated); m_parentForm.MdiChildActivate -= new EventHandler(ParentFormMdiChildActivate); } m_parentForm = value; if (m_parentForm == null) return; // If the parent form has not been created yet, // wait to initialize the MDI client until it is. if (m_parentForm.IsHandleCreated) { InitializeMdiClient(); RefreshProperties(); } else m_parentForm.HandleCreated += new EventHandler(ParentFormHandleCreated); m_parentForm.MdiChildActivate += new EventHandler(ParentFormMdiChildActivate); } } public ISite Site { get { return m_site; } set { m_site = value; if (m_site == null) return; // If the component is dropped onto a form during design-time, // set the ParentForm property. IDesignerHost host = (value.GetService(typeof(IDesignerHost)) as IDesignerHost); if (host != null) { Form parent = host.RootComponent as Form; if (parent != null) ParentForm = parent; } } } public void RenewMdiClient() { // Reinitialize the MdiClient and its properties. InitializeMdiClient(); RefreshProperties(); } public event EventHandler Disposed; public event EventHandler HandleAssigned; public event EventHandler MdiChildActivate; public event LayoutEventHandler Layout; protected virtual void OnHandleAssigned(EventArgs e) { // Raise the HandleAssigned event. if (HandleAssigned != null) HandleAssigned(this, e); } protected virtual void OnMdiChildActivate(EventArgs e) { // Raise the MdiChildActivate event if (MdiChildActivate != null) MdiChildActivate(this, e); } protected virtual void OnLayout(LayoutEventArgs e) { // Raise the Layout event if (Layout != null) Layout(this, e); } public event PaintEventHandler Paint; protected virtual void OnPaint(PaintEventArgs e) { // Raise the Paint event. if (Paint != null) Paint(this, e); } protected override void WndProc(ref Message m) { switch (m.Msg) { case (int)Win32.Msgs.WM_NCCALCSIZE: // If AutoScroll is set to false, hide the scrollbars when the control // calculates its non-client area. if (!AutoScroll) NativeMethods.ShowScrollBar(m.HWnd, (int)Win32.ScrollBars.SB_BOTH, 0 /*false*/); break; } base.WndProc(ref m); } private void ParentFormHandleCreated(object sender, EventArgs e) { // The form has been created, unwire the event, and initialize the MdiClient. this.m_parentForm.HandleCreated -= new EventHandler(ParentFormHandleCreated); InitializeMdiClient(); RefreshProperties(); } private void ParentFormMdiChildActivate(object sender, EventArgs e) { OnMdiChildActivate(e); } private void MdiClientLayout(object sender, LayoutEventArgs e) { OnLayout(e); } private void MdiClientHandleDestroyed(object sender, EventArgs e) { // If the MdiClient handle has been released, drop the reference and // release the handle. if (m_mdiClient != null) { m_mdiClient.HandleDestroyed -= new EventHandler(MdiClientHandleDestroyed); m_mdiClient = null; } ReleaseHandle(); } private void InitializeMdiClient() { // If the mdiClient has previously been set, unwire events connected // to the old MDI. if (MdiClient != null) { MdiClient.HandleDestroyed -= new EventHandler(MdiClientHandleDestroyed); MdiClient.Layout -= new LayoutEventHandler(MdiClientLayout); } if (ParentForm == null) return; // Get the MdiClient from the parent form. foreach (Control control in ParentForm.Controls) { // If the form is an MDI container, it will contain an MdiClient control // just as it would any other control. m_mdiClient = control as MdiClient; if (m_mdiClient == null) continue; // Assign the MdiClient Handle to the NativeWindow. 
ReleaseHandle(); AssignHandle(MdiClient.Handle); // Raise the HandleAssigned event. OnHandleAssigned(EventArgs.Empty); // Monitor the MdiClient for when its handle is destroyed. MdiClient.HandleDestroyed += new EventHandler(MdiClientHandleDestroyed); MdiClient.Layout += new LayoutEventHandler(MdiClientLayout); break; } } private void RefreshProperties() { // Refresh all the properties BorderStyle = m_borderStyle; AutoScroll = m_autoScroll; } private void UpdateStyles() { // To show style changes, the non-client area must be repainted. Using the // control's Invalidate method does not affect the non-client area. // Instead use a Win32 call to signal the style has changed. NativeMethods.SetWindowPos(MdiClient.Handle, IntPtr.Zero, 0, 0, 0, 0, Win32.FlagsSetWindowPos.SWP_NOACTIVATE | Win32.FlagsSetWindowPos.SWP_NOMOVE | Win32.FlagsSetWindowPos.SWP_NOSIZE | Win32.FlagsSetWindowPos.SWP_NOZORDER | Win32.FlagsSetWindowPos.SWP_NOOWNERZORDER | Win32.FlagsSetWindowPos.SWP_FRAMECHANGED); } } private MdiClientController m_mdiClientController = null; private MdiClientController GetMdiClientController() { if (m_mdiClientController == null) { m_mdiClientController = new MdiClientController(); m_mdiClientController.HandleAssigned += new EventHandler(MdiClientHandleAssigned); m_mdiClientController.MdiChildActivate += new EventHandler(ParentFormMdiChildActivate); m_mdiClientController.Layout += new LayoutEventHandler(MdiClient_Layout); } return m_mdiClientController; } private void ParentFormMdiChildActivate(object sender, EventArgs e) { if (GetMdiClientController().ParentForm == null) return; IDockContent content = GetMdiClientController().ParentForm.ActiveMdiChild as IDockContent; if (content == null) return; if (content.DockHandler.DockPanel == this && content.DockHandler.Pane != null) content.DockHandler.Pane.ActiveContent = content; } private bool MdiClientExists { get { return GetMdiClientController().MdiClient != null; } } private void SetMdiClientBounds(Rectangle bounds) { GetMdiClientController().MdiClient.Bounds = bounds; } private void SuspendMdiClientLayout() { if (GetMdiClientController().MdiClient != null) GetMdiClientController().MdiClient.SuspendLayout(); } private void ResumeMdiClientLayout(bool perform) { if (GetMdiClientController().MdiClient != null) GetMdiClientController().MdiClient.ResumeLayout(perform); } private void PerformMdiClientLayout() { if (GetMdiClientController().MdiClient != null) GetMdiClientController().MdiClient.PerformLayout(); } // Called when: // 1. DockPanel.DocumentStyle changed // 2. DockPanel.Visible changed // 3. MdiClientController.Handle assigned private void SetMdiClient() { MdiClientController controller = GetMdiClientController(); controller.AutoScroll = false; controller.BorderStyle = BorderStyle.None; if (MdiClientExists) controller.MdiClient.Dock = DockStyle.Fill; } internal Rectangle RectangleToMdiClient(Rectangle rect) { if (MdiClientExists) return GetMdiClientController().MdiClient.RectangleToClient(rect); else return Rectangle.Empty; } } }
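// ---------------------------------------------------------------------------
// Standalone sketch (not part of DockPanel) of the Win32 technique the
// BorderStyle setter above relies on: toggling WS_EX_CLIENTEDGE / WS_BORDER on
// an existing window handle and forcing a non-client repaint with
// SWP_FRAMECHANGED. The constants and P/Invoke signatures are the standard
// 32-bit user32 ones; the helper name is illustrative.
using System;
using System.Runtime.InteropServices;

static class BorderStyleSketch
{
    const int GWL_STYLE = -16;
    const int GWL_EXSTYLE = -20;
    const int WS_BORDER = 0x00800000;
    const int WS_EX_CLIENTEDGE = 0x00000200;
    const uint SWP_FLAGS = 0x0001 /*NOSIZE*/ | 0x0002 /*NOMOVE*/ | 0x0004 /*NOZORDER*/
                         | 0x0010 /*NOACTIVATE*/ | 0x0020 /*FRAMECHANGED*/;

    [DllImport("user32.dll")] static extern int GetWindowLong(IntPtr hWnd, int nIndex);
    [DllImport("user32.dll")] static extern int SetWindowLong(IntPtr hWnd, int nIndex, int dwNewLong);
    [DllImport("user32.dll")] static extern bool SetWindowPos(IntPtr hWnd, IntPtr hWndInsertAfter,
        int x, int y, int cx, int cy, uint flags);

    // Give the window a 3D client edge (the Fixed3D case above) and repaint its frame.
    public static void ApplyFixed3D(IntPtr hwnd)
    {
        int style = GetWindowLong(hwnd, GWL_STYLE);
        int exStyle = GetWindowLong(hwnd, GWL_EXSTYLE);

        SetWindowLong(hwnd, GWL_STYLE, style & ~WS_BORDER);
        SetWindowLong(hwnd, GWL_EXSTYLE, exStyle | WS_EX_CLIENTEDGE);

        // Changing styles does not repaint the non-client area by itself.
        SetWindowPos(hwnd, IntPtr.Zero, 0, 0, 0, 0, SWP_FLAGS);
    }
}
// ---------------------------------------------------------------------------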
/* * * (c) Copyright Ascensio System Limited 2010-2021 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Web; using ASC.Core; using ASC.CRM.Core; using ASC.Web.Core.Utility.Skins; using ASC.Web.CRM.Classes; using ASC.Web.CRM.Configuration; using ASC.Web.CRM.Resources; using ASC.Web.Studio.Core.Users; namespace ASC.Web.CRM.Controls.Common { public partial class ImportFromCSVView : BaseUserControl { #region Property public static String Location { get { return PathProvider.GetFileStaticRelativePath("Common/ImportFromCSVView.ascx"); } } public EntityType EntityType; protected String ImportFromCSVStepOneHeaderLabel; protected String ImportFromCSVStepTwoHeaderLabel; protected String ImportFromCSVStepOneDescriptionLabel; protected String ImportFromCSVStepTwoDescriptionLabel; protected String StartImportLabel; protected String ImportStartingPanelHeaderLabel; protected String ImportStartingPanelDescriptionLabel; protected String ImportStartingPanelButtonLabel; protected String GoToRedirectURL; protected String ImportImgSrc; #endregion #region Events protected void InitForContacts() { StartImportLabel = CRMContactResource.StartImport; ImportFromCSVStepOneHeaderLabel = CRMContactResource.ImportFromCSVStepOneHeader; ImportFromCSVStepTwoHeaderLabel = CRMContactResource.ImportFromCSVStepTwoHeader; ImportFromCSVStepOneDescriptionLabel = CRMContactResource.ImportFromCSVStepOneDescription; ImportFromCSVStepTwoDescriptionLabel = CRMContactResource.ImportFromCSVStepTwoDescription; ImportStartingPanelHeaderLabel = CRMContactResource.ImportStartingPanelHeader; ImportStartingPanelDescriptionLabel = CRMContactResource.ImportStartingPanelDescription; ImportStartingPanelButtonLabel = CRMContactResource.ImportStartingPanelButton; ImportImgSrc = WebImageSupplier.GetAbsoluteWebPath("import_contacts.png", ProductEntryPoint.ID); Page.RegisterClientScript(new Masters.ClientScripts.ImportFromCSVViewDataContacts()); RegisterClientScriptHelper.DataUserSelectorListView(Page, "_ImportContactsManager", null); } protected void InitForOpportunity() { StartImportLabel = CRMDealResource.StartImport; ImportFromCSVStepOneHeaderLabel = CRMDealResource.ImportFromCSVStepOneHeader; ImportFromCSVStepTwoHeaderLabel = CRMDealResource.ImportFromCSVStepTwoHeader; ImportFromCSVStepOneDescriptionLabel = CRMDealResource.ImportFromCSVStepOneDescription; ImportFromCSVStepTwoDescriptionLabel = CRMDealResource.ImportFromCSVStepTwoDescription; // // ImportFromCSVStepTwoDescription ImportStartingPanelHeaderLabel = CRMDealResource.ImportStartingPanelHeader; ImportStartingPanelDescriptionLabel = CRMDealResource.ImportStartingPanelDescription; ImportStartingPanelButtonLabel = CRMDealResource.ImportStartingPanelButton; ImportImgSrc = WebImageSupplier.GetAbsoluteWebPath("import-opportunities.png", ProductEntryPoint.ID); Page.RegisterClientScript(new Masters.ClientScripts.ImportFromCSVViewDataDeals()); var privatePanel = 
(PrivatePanel)Page.LoadControl(PrivatePanel.Location); privatePanel.CheckBoxLabel = CRMDealResource.PrivatePanelCheckBoxLabel; privatePanel.IsPrivateItem = false; var usersWhoHasAccess = new List<string> { CustomNamingPeople.Substitute<CRMCommonResource>("CurrentUser") }; privatePanel.UsersWhoHasAccess = usersWhoHasAccess; privatePanel.DisabledUsers = new List<Guid> { SecurityContext.CurrentAccount.ID }; privatePanel.HideNotifyPanel = true; _phPrivatePanel.Controls.Add(privatePanel); } protected void InitForTask() { StartImportLabel = CRMTaskResource.StartImport; ImportFromCSVStepOneHeaderLabel = CRMTaskResource.ImportFromCSVStepOneHeader; ImportFromCSVStepTwoHeaderLabel = CRMTaskResource.ImportFromCSVStepTwoHeader; ImportFromCSVStepOneDescriptionLabel = CRMTaskResource.ImportFromCSVStepOneDescription; ImportFromCSVStepTwoDescriptionLabel = CRMTaskResource.ImportFromCSVStepTwoDescription; ImportStartingPanelHeaderLabel = CRMTaskResource.ImportStartingPanelHeader; ImportStartingPanelDescriptionLabel = CRMTaskResource.ImportStartingPanelDescription; ImportStartingPanelButtonLabel = CRMTaskResource.ImportStartingPanelButton; ImportImgSrc = WebImageSupplier.GetAbsoluteWebPath("import-tasks.png", ProductEntryPoint.ID); Page.RegisterClientScript(new Masters.ClientScripts.ImportFromCSVViewDataTasks()); } protected void InitForCase() { StartImportLabel = CRMCasesResource.StartImport; ImportFromCSVStepOneHeaderLabel = CRMCasesResource.ImportFromCSVStepOneHeader; ImportFromCSVStepTwoHeaderLabel = CRMCasesResource.ImportFromCSVStepTwoHeader; ImportFromCSVStepOneDescriptionLabel = CRMCasesResource.ImportFromCSVStepOneDescription; ImportFromCSVStepTwoDescriptionLabel = CRMCasesResource.ImportFromCSVStepTwoDescription; ImportStartingPanelHeaderLabel = CRMCasesResource.ImportStartingPanelHeader; ImportStartingPanelDescriptionLabel = CRMCasesResource.ImportStartingPanelDescription; ImportStartingPanelButtonLabel = CRMCasesResource.ImportStartingPanelButton; ImportImgSrc = WebImageSupplier.GetAbsoluteWebPath("import-cases.png", ProductEntryPoint.ID); Page.RegisterClientScript(new Masters.ClientScripts.ImportFromCSVViewDataCases()); var privatePanel = (PrivatePanel)Page.LoadControl(PrivatePanel.Location); privatePanel.CheckBoxLabel = CRMCasesResource.PrivatePanelCheckBoxLabel; privatePanel.IsPrivateItem = false; var usersWhoHasAccess = new List<string> { CustomNamingPeople.Substitute<CRMCommonResource>("CurrentUser") }; privatePanel.UsersWhoHasAccess = usersWhoHasAccess; privatePanel.DisabledUsers = new List<Guid> { SecurityContext.CurrentAccount.ID }; privatePanel.HideNotifyPanel = true; _phPrivatePanel.Controls.Add(privatePanel); } protected void Page_Load(object sender, EventArgs e) { switch (EntityType) { case EntityType.Contact: GoToRedirectURL = "Default.aspx"; InitForContacts(); break; case EntityType.Opportunity: GoToRedirectURL = "Deals.aspx"; InitForOpportunity(); break; case EntityType.Task: GoToRedirectURL = "Tasks.aspx"; InitForTask(); break; case EntityType.Case: GoToRedirectURL = "Cases.aspx"; InitForCase(); break; } RegisterScript(); } private void RegisterScript() { var sb = new StringBuilder(); sb.AppendFormat(@"ASC.CRM.ImportFromCSVView.init(""{0}"", ""{1}"");", (int)EntityType, EntityType.ToString().ToLower()); Page.RegisterInlineScript(sb.ToString()); } public EncodingInfo[] GetEncodings() { return Encoding.GetEncodings().OrderBy(x => x.DisplayName).ToArray(); } #endregion } }
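// ---------------------------------------------------------------------------
// Minimal sketch (names and enum values below are illustrative placeholders,
// not part of the page above) of the inline-script pattern RegisterScript uses:
// the server formats the entity type id and its lowercase name into the
// client-side ASC.CRM.ImportFromCSVView.init call.
using System;
using System.Text;

static class ImportInitScriptSketch
{
    enum EntityType { Contact = 1, Opportunity = 2, Task = 3, Case = 4 } // placeholder ids

    static string BuildInitScript(EntityType entityType)
    {
        var sb = new StringBuilder();
        sb.AppendFormat(@"ASC.CRM.ImportFromCSVView.init(""{0}"", ""{1}"");",
            (int)entityType, entityType.ToString().ToLower());
        return sb.ToString();
    }

    static void Main()
    {
        // e.g. ASC.CRM.ImportFromCSVView.init("2", "opportunity");
        Console.WriteLine(BuildInitScript(EntityType.Opportunity));
    }
}
// ---------------------------------------------------------------------------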
#if SILVERLIGHT using Microsoft.Silverlight.Testing; using Microsoft.VisualStudio.TestTools.UnitTesting; #elif NETFX_CORE using DevExpress.TestFramework.NUnit; using DevExpress.Mvvm.Tests.TestUtils; using System.Threading.Tasks; using Windows.UI.Xaml.Controls; using Windows.UI.Xaml; using Windows.UI.Xaml.Data; using DevExpress.TestUtils; #else using NUnit.Framework; #endif #if !FREE && !NETFX_CORE using DevExpress.Xpf.Core.Tests; #endif using System; using System.Linq; using System.Windows; using DevExpress.Mvvm.Native; using DevExpress.Mvvm.UI.Interactivity; #if !NETFX_CORE using System.Windows.Controls; using System.Windows.Controls.Primitives; using DevExpress.Mvvm.POCO; using System.Windows.Threading; using System.Windows.Data; #else #endif namespace DevExpress.Mvvm.UI.Tests { [TestFixture] public class EventToCommandTests : BaseWpfFixture { [Test, Asynchronous] #if !NETFX_CORE public void EventToCommandInWindow() { #else public async Task EventToCommandInWindow() { #endif var userControl = new EventToCommandTestView(); Window.Content = userControl; var viewModel = (EventToCommandTestViewModel)userControl.DataContext; Assert.AreEqual(0, viewModel.LoadedCount); Assert.AreEqual(0, viewModel.SelectionChangedCount); #if NETFX_CORE await #endif EnqueueTestWindowMainCallback(() => { foreach(EventToCommand trigger in Interaction.GetBehaviors(userControl)) { Assert.AreEqual(1, trigger.RaiseSourceChangedCount); } Assert.AreEqual(1, viewModel.LoadedCount); Assert.AreEqual("test", viewModel.LoadedParameter); Assert.AreEqual(0, viewModel.DummyCommand.InvokeCount); Assert.AreEqual(1, viewModel.ButtonLoadedCount); Assert.AreEqual(0, viewModel.SelectionChangedCount); userControl.listBox.SelectedIndex = 1; Assert.AreEqual(1, viewModel.SelectionChangedCount); Assert.AreEqual("item2", viewModel.SelectionChangedParameter.AddedItems[0].With(x => x as ListBoxItem).Content); Assert.AreEqual(0, viewModel.SelectionChangedCount2); userControl.listBox2.SelectedIndex = 1; Assert.AreEqual(1, viewModel.SelectionChangedCount2); Assert.AreEqual("foo", viewModel.SelectionChangedParameter2); Assert.AreEqual(0, viewModel.SelectionChangedCount3); userControl.listBox3.SelectedIndex = 1; Assert.AreEqual(1, viewModel.SelectionChangedCount3); Assert.AreEqual("item2", viewModel.SelectionChangedParameter3.With(x => x as ListBoxItem).Content); Assert.IsFalse(userControl.listBox4.IsEnabled); viewModel.SelectionChangedCommandParameter4 = true; Assert.IsTrue(userControl.listBox4.IsEnabled); viewModel.SelectionChangedCommandParameter4 = false; Assert.IsFalse(userControl.listBox4.IsEnabled); var eventToCommand = (EventToCommand)Interaction.GetBehaviors(userControl.listBox4)[0]; eventToCommand.Command = viewModel.SelectionChangedCommand2; Assert.IsTrue(userControl.listBox4.IsEnabled); eventToCommand.AllowChangingEventOwnerIsEnabled = false; eventToCommand.Command = viewModel.SelectionChangedCommand4; Assert.IsTrue(userControl.listBox4.IsEnabled); eventToCommand.AllowChangingEventOwnerIsEnabled = true; Assert.IsFalse(userControl.listBox4.IsEnabled); Assert.IsFalse(userControl.listBox5.IsEnabled); viewModel.SelectionChangedCommandParameter5 = true; Assert.IsTrue(userControl.listBox5.IsEnabled); viewModel.SelectionChangedCommandParameter5 = false; Assert.IsFalse(userControl.listBox5.IsEnabled); eventToCommand = (EventToCommand)Interaction.GetBehaviors(userControl).First(x => ((EventToCommand)x).SourceName == "listBox5"); eventToCommand.Command = viewModel.SelectionChangedCommand2; Assert.IsTrue(userControl.listBox5.IsEnabled); 
eventToCommand.AllowChangingEventOwnerIsEnabled = false; eventToCommand.Command = viewModel.SelectionChangedCommand5; Assert.IsTrue(userControl.listBox5.IsEnabled); eventToCommand.AllowChangingEventOwnerIsEnabled = true; Assert.IsFalse(userControl.listBox5.IsEnabled); }); EnqueueTestComplete(); } #if !NETFX_CORE [Test, Asynchronous] public void SourceChangedFireCount1() { var panel = new StackPanel(); panel.Children.Add(new Button() { Name = "button1" }); panel.Children.Add(new Button() { Name = "button2" }); int gotFocusCount = 0; var eventToCommand = new EventToCommand() { SourceObject = panel.Children[0], EventName = "GotFocus", Command = DelegateCommandFactory.Create(() => gotFocusCount++, false) }; Interaction.GetBehaviors(panel).Add(eventToCommand); Window.Content = panel; EnqueueShowWindow(); EnqueueCallback(() => { Assert.AreEqual(1, eventToCommand.RaiseSourceChangedCount); eventToCommand.SourceName = "button2"; Assert.AreEqual(1, eventToCommand.RaiseSourceChangedCount); Assert.AreEqual(0, gotFocusCount); ((Button)panel.Children[1]).Focus(); }); EnqueueConditional(() => gotFocusCount == 0); EnqueueCallback(() => { ((Button)panel.Children[0]).Focus(); }); EnqueueConditional(() => gotFocusCount == 1); EnqueueTestComplete(); } #endif [Test, Asynchronous] #if !NETFX_CORE public void SourceChangedFireCount2() { #else public async Task SourceChangedFireCount2() { #endif var panel = new StackPanel(); panel.Children.Add(new Button() { Name = "button2" }); int gotFocusCount = 0; var eventToCommand = new EventToCommand() { EventName = "GotFocus", Command = new DelegateCommand(() => gotFocusCount++) }; Interaction.GetBehaviors(panel).Add(eventToCommand); Window.Content = panel; #if NETFX_CORE await #endif EnqueueTestWindowMainCallback(() => { Assert.AreEqual(1, eventToCommand.RaiseSourceChangedCount); eventToCommand.SourceName = "button2"; }); EnqueueWindowUpdateLayout(); EnqueueCallback(() => { Assert.AreEqual(2, eventToCommand.RaiseSourceChangedCount); }); EnqueueTestComplete(); } [Test, Asynchronous] #if !NETFX_CORE public void B236199_DataContextChangedSubscription() { #else public async Task B236199_DataContextChangedSubscription() { #endif var button = new Button(); int dataContextChangedCount = 0; int dataContextChangedCount2 = 0; button.DataContextChanged += (d, e) => dataContextChangedCount2++; var eventToCommand = new EventToCommand() { EventName = "DataContextChanged", Command = new DelegateCommand(() => dataContextChangedCount++) }; Interaction.GetBehaviors(button).Add(eventToCommand); Window.Content = button; #if NETFX_CORE await #endif EnqueueTestWindowMainCallback(() => { Assert.AreEqual(dataContextChangedCount2, dataContextChangedCount); button.DataContext = "1"; Assert.AreEqual(dataContextChangedCount2, dataContextChangedCount); }); EnqueueTestComplete(); } [Test] public void NameScopeAccessProviderSourceTest() { var window = new ContentControl(); var eventToCommand = new EventToCommand(); var testViewModel = new TestViewModel(); window.Content = testViewModel; int execCount = 0; eventToCommand.Command = new DelegateCommand(() => execCount++); eventToCommand.SourceObject = testViewModel; eventToCommand.EventName = "TestEvent"; Interaction.GetBehaviors(window).Add(eventToCommand); testViewModel.RaiseTestEvent(); Assert.AreEqual(1, execCount); } public class TestViewModel { public event Action<object, object> TestEvent = (o1, o2) => { }; public void RaiseTestEvent() { TestEvent(null, null); } } [Test, Asynchronous] #if !NETFX_CORE public void B250383() { #else public async Task 
B250383() { #endif var control = new Button(); control.IsEnabled = false; int loaded = 0; var eventToCommand = new EventToCommand() { EventName = "Loaded", Command = new DelegateCommand(() => loaded++) }; Interaction.GetBehaviors(control).Add(eventToCommand); Window.Content = control; #if NETFX_CORE await #endif EnqueueTestWindowMainCallback(() => { Assert.AreEqual(1, loaded); eventToCommand.SourceName = "button2"; }); EnqueueCallback(() => { control = new Button(); control.IsEnabled = false; eventToCommand = new EventToCommand() { EventName = "Loaded", ProcessEventsFromDisabledEventOwner = false, Command = new DelegateCommand(() => loaded++) }; Interaction.GetBehaviors(control).Add(eventToCommand); loaded = 0; Window.Content = control; }); EnqueueWindowUpdateLayout(); EnqueueCallback(() => { Assert.AreEqual(0, loaded); }); EnqueueTestComplete(); } [Test, Asynchronous] #if !NETFX_CORE public void Q539009_1() { #else public async Task Q539009_1() { #endif var control = new Grid(); int counter2 = 0; int counter1 = 0; control.SizeChanged += (d, e) => counter2++; var eventToCommand = new EventToCommand() { EventName = "SizeChanged", Command = new DelegateCommand(() => counter1++), }; Interaction.GetBehaviors(control).Add(eventToCommand); Window.Content = control; #if NETFX_CORE await #endif EnqueueShowWindow(); EnqueueCallback(() => { Assert.AreEqual(counter2, counter1); }); EnqueueTestComplete(); } [Test, Asynchronous] #if !NETFX_CORE public void Q539009_2() { #else public async Task Q539009_2() { #endif var control = new Grid() { Name = "control" }; int counter2 = 0; int counter1 = 0; control.SizeChanged += (d, e) => counter2++; var eventToCommand = new EventToCommand() { SourceName = "control", EventName = "SizeChanged", Command = new DelegateCommand(() => counter1++), }; Interaction.GetBehaviors(control).Add(eventToCommand); Window.Content = control; #if NETFX_CORE await #endif EnqueueShowWindow(); EnqueueCallback(() => { Assert.AreEqual(counter2, counter1); }); EnqueueTestComplete(); } [Test, Asynchronous] #if !NETFX_CORE public void Q539009_3() { #else public async Task Q539009_3() { #endif var control = new Grid() { Name = "control" }; int counter2 = 0; int counter1 = 0; control.SizeChanged += (d, e) => counter2++; var eventToCommand = new EventToCommand() { SourceObject = control, EventName = "SizeChanged", Command = new DelegateCommand(() => counter1++), }; Interaction.GetBehaviors(control).Add(eventToCommand); Window.Content = control; #if NETFX_CORE await #endif EnqueueShowWindow(); EnqueueCallback(() => { Assert.AreEqual(counter2, counter1); }); EnqueueTestComplete(); } [Test, Asynchronous] #if !NETFX_CORE public void Q554072_11() { #else public async Task Q554072_11() { await #endif Q554072_1Core("Loaded"); } [Test, Asynchronous] #if !NETFX_CORE public void Q554072_12() { #else public async Task Q554072_12() { await #endif Q554072_1Core("SizeChanged"); } [Test, Asynchronous] #if !NETFX_CORE public void Q554072_21() { #else public async Task Q554072_21() { await #endif Q554072_2Core("Loaded"); } [Test, Asynchronous] #if !NETFX_CORE public void Q554072_22() { #else public async Task Q554072_22() { await #endif Q554072_2Core("SizeChanged"); } #if !NETFX_CORE void Q554072_1Core(string eventName) { #else async Task Q554072_1Core(string eventName) { #endif var control = new Grid(); var bt = new Button() { Name = "View" }; control.Children.Add(bt); int counter1 = 0; int counter2 = 0; int counter3 = 0; control.Loaded += (d, e) => counter1++; var eventToCommand1 = new EventToCommand() { 
PassEventArgsToCommand = true, EventName = eventName, Command = new DelegateCommand(() => counter2++), SourceName = "View", }; var eventToCommand2 = new EventToCommand() { PassEventArgsToCommand = true, EventName = eventName, Command = new DelegateCommand(() => counter3++), SourceName = "View", }; Interaction.GetBehaviors(control).Add(eventToCommand1); Interaction.GetBehaviors(bt).Add(eventToCommand2); Window.Content = control; #if NETFX_CORE await #endif EnqueueShowWindow(); EnqueueCallback(() => { Assert.AreEqual(counter2, counter1); Assert.AreEqual(counter3, counter1); }); EnqueueTestComplete(); } #if !NETFX_CORE void Q554072_2Core(string eventName) { #else async Task Q554072_2Core(string eventName) { #endif var control = new Grid(); var bt = new Button() { Name = "View" }; control.Children.Add(bt); int counter1 = 0; int counter2 = 0; control.Loaded += (d, e) => counter1++; var eventToCommand1 = new EventToCommand() { PassEventArgsToCommand = true, EventName = eventName, Command = new DelegateCommand(() => counter2++), }; BindingOperations.SetBinding(eventToCommand1, EventToCommand.SourceObjectProperty, new Binding() { ElementName = "View" }); Interaction.GetBehaviors(control).Add(eventToCommand1); Window.Content = control; #if NETFX_CORE await #endif EnqueueShowWindow(); EnqueueCallback(() => { var evv = eventToCommand1.SourceObject; Assert.AreEqual(counter2, counter1); }); EnqueueTestComplete(); } [Test, Asynchronous] #if !NETFX_CORE public void Q554072_3() { #else public async Task Q554072_3() { #endif var control = new EventToCommandTestView(); Window.Content = control; #if NETFX_CORE await #endif EnqueueShowWindow(); EnqueueCallback(() => { EventToCommandTestViewModel vm = (EventToCommandTestViewModel)control.DataContext; #if !NETFX_CORE Assert.AreEqual(2, vm.Q554072CommandCount); #else Assert.IsTrue(vm.Q554072CommandCount > 1); #endif }); EnqueueTestComplete(); } #if !SILVERLIGHT && !NETFX_CORE [Test] public void SetEvent_CheckEventNameIsReset_SetEventName_CheckEventIsReset() { EventToCommand eventToCommand = new EventToCommand(); Assert.IsNotNull(eventToCommand.EventName); eventToCommand.Event = Validation.ErrorEvent; Assert.IsNull(eventToCommand.EventName); eventToCommand.EventName = "Unloaded"; Assert.IsNull(eventToCommand.Event); } [Test] public void SetEvent_RaiseEvent_CheckCommandExecuted() { bool commandExecuted = false; EventToCommand eventToCommand = new EventToCommand() { Command = new DelegateCommand(() => { commandExecuted = true; }) }; Button button = new Button(); eventToCommand.Event = Button.ClickEvent; eventToCommand.Attach(button); button.RaiseEvent(new RoutedEventArgs(Button.ClickEvent, button)); EnqueueWindowUpdateLayout(); Assert.IsTrue(commandExecuted); } #endif #if !NETFX_CORE [Test, Asynchronous] public void EventSenderTest1() { EventSenderTestCore(true, true); } [Test, Asynchronous] public void EventSenderTest2() { EventSenderTestCore(false, true); } [Test, Asynchronous] public void EventSenderTest3() { EventSenderTestCore(true, false); } [Test, Asynchronous] public void EventSenderTest4() { EventSenderTestCore(false, false); } public void EventSenderTestCore(bool addControlFirst, bool linearLayout) { Grid rootRoot = new Grid(); Grid root = new Grid(); Button bt = null; if(addControlFirst) { if(linearLayout) { bt = new Button() { Name = "bt" }; root.Children.Add(bt); } else { bt = new Button() { Name = "bt" }; rootRoot.Children.Add(bt); rootRoot.Children.Add(root); } } EventToCommandTestClass eventToCommand1 = new EventToCommandTestClass() { EventName = 
"SizeChanged" }; EventToCommandTestClass eventToCommand2 = new EventToCommandTestClass() { EventName = "SizeChanged" }; BindingOperations.SetBinding(eventToCommand2, EventToCommand.SourceObjectProperty, new Binding() { ElementName = "bt" }); EventToCommandTestClass eventToCommand3 = new EventToCommandTestClass() { EventName = "SizeChanged", SourceName = "bt" }; EventToCommandTestClass eventToCommand4 = new EventToCommandTestClass() { EventName = "Loaded" }; EventToCommandTestClass eventToCommand5 = new EventToCommandTestClass() { EventName = "Loaded" }; BindingOperations.SetBinding(eventToCommand5, EventToCommand.SourceObjectProperty, new Binding() { ElementName = "bt" }); EventToCommandTestClass eventToCommand6 = new EventToCommandTestClass() { EventName = "Loaded", SourceName = "bt" }; Interaction.GetBehaviors(root).Add(eventToCommand1); Interaction.GetBehaviors(root).Add(eventToCommand2); Interaction.GetBehaviors(root).Add(eventToCommand3); Interaction.GetBehaviors(root).Add(eventToCommand4); Interaction.GetBehaviors(root).Add(eventToCommand5); Interaction.GetBehaviors(root).Add(eventToCommand6); if(!addControlFirst) { if(linearLayout) { bt = new Button() { Name = "bt" }; root.Children.Add(bt); } else { bt = new Button() { Name = "bt" }; rootRoot.Children.Add(bt); rootRoot.Children.Add(root); } } if(linearLayout) Window.Content = root; else Window.Content = rootRoot; EnqueueShowWindow(); EnqueueWindowUpdateLayout(); EnqueueCallback(() => { Assert.AreEqual(1, eventToCommand1.EventCount); Assert.AreEqual(root, eventToCommand1.EventSender); Assert.AreEqual(EventToCommandType.AssociatedObject, eventToCommand1.Type); Assert.AreEqual(1, eventToCommand2.EventCount); Assert.AreEqual(bt, eventToCommand2.EventSender); Assert.AreEqual(EventToCommandType.SourceObject, eventToCommand2.Type); Assert.AreEqual(1, eventToCommand3.EventCount); Assert.AreEqual(bt, eventToCommand3.EventSender); Assert.AreEqual(EventToCommandType.SourceName, eventToCommand3.Type); Assert.AreEqual(1, eventToCommand4.EventCount); Assert.AreEqual(root, eventToCommand4.EventSender); Assert.AreEqual(EventToCommandType.AssociatedObject, eventToCommand4.Type); Assert.AreEqual(1, eventToCommand5.EventCount); Assert.AreEqual(bt, eventToCommand5.EventSender); Assert.AreEqual(EventToCommandType.SourceObject, eventToCommand5.Type); Assert.AreEqual(1, eventToCommand6.EventCount); Assert.AreEqual(bt, eventToCommand6.EventSender); Assert.AreEqual(EventToCommandType.SourceName, eventToCommand6.Type); }); EnqueueTestComplete(); } #else [Test, Asynchronous] public async Task EventSenderTest1() { await EventSenderTestCore(true, true); } [Test, Asynchronous] public async Task EventSenderTest2() { await EventSenderTestCore(false, true); } [Test, Asynchronous] public async Task EventSenderTest3() { await EventSenderTestCore(true, false); } [Test, Asynchronous] public async Task EventSenderTest4() { await EventSenderTestCore(false, false); } class TestButton : Button { public TestButton() { Loaded += TestButton_Loaded; } async void TestButton_Loaded(object sender, RoutedEventArgs e) { await Task.Delay(35); Width *= 2; } } public async Task EventSenderTestCore(bool addControlFirst, bool linearLayout) { Grid rootRoot = new Grid(); Grid root = new Grid(); Button bt = null; if(addControlFirst) { if(linearLayout) { bt = new TestButton() { Name = "bt" }; root.Children.Add(bt); } else { bt = new TestButton() { Name = "bt" }; rootRoot.Children.Add(bt); rootRoot.Children.Add(root); } } EventToCommandTestClass eventToCommand1 = new 
EventToCommandTestClass() { EventName = "SizeChanged" }; EventToCommandTestClass eventToCommand2 = new EventToCommandTestClass() { EventName = "SizeChanged" }; BindingOperations.SetBinding(eventToCommand2, EventToCommand.SourceObjectProperty, new Binding() { ElementName = "bt" }); EventToCommandTestClass eventToCommand3 = new EventToCommandTestClass() { EventName = "SizeChanged", SourceName = "bt" }; EventToCommandTestClass eventToCommand4 = new EventToCommandTestClass() { EventName = "Loaded" }; EventToCommandTestClass eventToCommand5 = new EventToCommandTestClass() { EventName = "Loaded" }; BindingOperations.SetBinding(eventToCommand5, EventToCommand.SourceObjectProperty, new Binding() { ElementName = "bt" }); EventToCommandTestClass eventToCommand6 = new EventToCommandTestClass() { EventName = "Loaded", SourceName = "bt" }; Interaction.GetBehaviors(root).Add(eventToCommand1); Interaction.GetBehaviors(root).Add(eventToCommand2); Interaction.GetBehaviors(root).Add(eventToCommand3); Interaction.GetBehaviors(root).Add(eventToCommand4); Interaction.GetBehaviors(root).Add(eventToCommand5); Interaction.GetBehaviors(root).Add(eventToCommand6); if(!addControlFirst) { if(linearLayout) { bt = new TestButton() { Name = "bt" }; root.Children.Add(bt); } else { bt = new TestButton() { Name = "bt" }; rootRoot.Children.Add(bt); rootRoot.Children.Add(root); } } bt.Loaded += bt_Loaded; bt.SizeChanged += bt_SizeChanged; if(linearLayout) Window.Content = root; else Window.Content = rootRoot; await EnqueueShowWindow(); EnqueueWindowUpdateLayout(); EnqueueCallback(() => { Assert.AreEqual(1, eventToCommand1.EventCount); Assert.AreEqual(root, eventToCommand1.EventSender); Assert.AreEqual(EventToCommandType.AssociatedObject, eventToCommand1.Type); Assert.IsTrue(eventToCommand2.EventCount > 0); Assert.AreEqual(bt, eventToCommand2.EventSender); Assert.AreEqual(EventToCommandType.SourceObject, eventToCommand2.Type); Assert.AreEqual(1, eventToCommand3.EventCount); Assert.AreEqual(bt, eventToCommand3.EventSender); Assert.AreEqual(EventToCommandType.SourceName, eventToCommand3.Type); Assert.AreEqual(1, eventToCommand4.EventCount); Assert.AreEqual(root, eventToCommand4.EventSender); Assert.AreEqual(EventToCommandType.AssociatedObject, eventToCommand4.Type); Assert.AreEqual(1, eventToCommand5.EventCount); Assert.AreEqual(bt, eventToCommand5.EventSender); Assert.AreEqual(EventToCommandType.SourceObject, eventToCommand5.Type); Assert.AreEqual(1, eventToCommand6.EventCount); Assert.AreEqual(bt, eventToCommand6.EventSender); Assert.AreEqual(EventToCommandType.SourceName, eventToCommand6.Type); }); EnqueueTestComplete(); } void bt_SizeChanged(object sender, SizeChangedEventArgs e) { } void bt_Loaded(object sender, RoutedEventArgs e) { } #endif [Test] public void EventArgsConverter_PassEventArgsToCommand() { var button = new Button(); int dataContextChangedCount = 0; int dataContextChangedCount2 = 0; button.DataContextChanged += (d, e) => dataContextChangedCount2++; var eventArgsConverter = new EventArgsConverterTestClass(); var eventToCommand = new EventToCommand() { EventName = "DataContextChanged", Command = new DelegateCommand(() => dataContextChangedCount++), EventArgsConverter = eventArgsConverter }; Interaction.GetBehaviors(button).Add(eventToCommand); Assert.AreEqual(dataContextChangedCount2, dataContextChangedCount); Assert.AreEqual(0, eventArgsConverter.Count); button.DataContext = "1"; Assert.AreEqual(1, dataContextChangedCount); Assert.AreEqual(dataContextChangedCount2, dataContextChangedCount); Assert.AreEqual(1, 
eventArgsConverter.Count); eventToCommand.PassEventArgsToCommand = false; button.DataContext = "2"; Assert.AreEqual(2, dataContextChangedCount); Assert.AreEqual(dataContextChangedCount2, dataContextChangedCount); #if !NETFX_CORE Assert.AreEqual(1, eventArgsConverter.Count); #else Assert.AreEqual(2, eventArgsConverter.Count); eventToCommand.EventArgsConverter = null; button.DataContext = "2"; Assert.AreEqual(3, dataContextChangedCount); Assert.AreEqual(dataContextChangedCount2, dataContextChangedCount); Assert.AreEqual(2, eventArgsConverter.Count); #endif } #if !NETFX_CORE void DispatcherTestCore(Action<EventToCommand> eventToCommandInitializer, bool checkImmediately) { #else async Task DispatcherTestCore(Action<EventToCommand> eventToCommandInitializer, bool checkImmediately) { #endif var button = new Button(); int dataContextChangedCount = 0; int dataContextChangedCount2 = 0; button.DataContextChanged += (d, e) => dataContextChangedCount2++; var eventToCommand = new EventToCommand() { EventName = "DataContextChanged", Command = new DelegateCommand(() => dataContextChangedCount++), #if !SILVERLIGHT && !NETFX_CORE DispatcherPriority = DispatcherPriority.Render, #endif }; eventToCommandInitializer(eventToCommand); Interaction.GetBehaviors(button).Add(eventToCommand); Window.Content = button; Assert.AreEqual(dataContextChangedCount2, dataContextChangedCount); button.DataContext = "1"; if(!checkImmediately) Assert.AreEqual(0, dataContextChangedCount); else Assert.AreEqual(1, dataContextChangedCount); Assert.AreEqual(1, dataContextChangedCount2); #if NETFX_CORE await #endif EnqueueShowWindow(); EnqueueWindowUpdateLayout(); EnqueueCallback(() => { #if !NETFX_CORE Assert.AreEqual(1, dataContextChangedCount); #else Assert.AreEqual(dataContextChangedCount2, dataContextChangedCount); #endif }); EnqueueTestComplete(); } #if !SILVERLIGHT && !NETFX_CORE [Test] public void DispatcherDefaultValues() { EventToCommand eventToCommand = new EventToCommand(); Assert.AreEqual(null, eventToCommand.UseDispatcher); Assert.AreEqual(null, eventToCommand.DispatcherPriority); Assert.AreEqual(false, eventToCommand.ActualUseDispatcher); Assert.AreEqual(DispatcherPriority.Normal, eventToCommand.ActualDispatcherPriority); eventToCommand.DispatcherPriority = DispatcherPriority.Normal; Assert.AreEqual(true, eventToCommand.ActualUseDispatcher); Assert.AreEqual(DispatcherPriority.Normal, eventToCommand.ActualDispatcherPriority); eventToCommand.DispatcherPriority = DispatcherPriority.Render; Assert.AreEqual(DispatcherPriority.Render, eventToCommand.ActualDispatcherPriority); eventToCommand.UseDispatcher = false; Assert.AreEqual(false, eventToCommand.ActualUseDispatcher); eventToCommand.DispatcherPriority = null; eventToCommand.UseDispatcher = true; Assert.AreEqual(true, eventToCommand.ActualUseDispatcher); Assert.AreEqual(DispatcherPriority.Normal, eventToCommand.ActualDispatcherPriority); } [Test, Asynchronous] public void NotNullDispatcherPriority_NullUseDispatcher() { DispatcherTestCore(x => { x.DispatcherPriority = DispatcherPriority.Render; }, false); } #endif [Test, Asynchronous] #if !NETFX_CORE public void TrueUseDispatcher_NullDispatcherPriority() { #else public async Task TrueUseDispatcher_NullDispatcherPriority() { await #endif DispatcherTestCore(x => { x.UseDispatcher = true; }, false); } #if !SILVERLIGHT && !NETFX_CORE [Test, Asynchronous] public void MarkRoutedEventsAsHandled() { var button = new Button() { Name = "View" }; int counter1 = 0; int counter2 = 0; int counter3 = 0; button.Loaded += (d, e) => 
counter1++; var eventToCommand1 = new EventToCommand() { PassEventArgsToCommand = true, EventName = "Loaded", Command = new DelegateCommand(() => counter2++), SourceName = "View", MarkRoutedEventsAsHandled = true, }; var eventToCommand2 = new EventToCommand() { PassEventArgsToCommand = true, EventName = "Loaded", Command = new DelegateCommand(() => counter3++), SourceName = "View", MarkRoutedEventsAsHandled = true, }; Interaction.GetBehaviors(button).Add(eventToCommand1); Interaction.GetBehaviors(button).Add(eventToCommand2); Window.Content = button; EnqueueShowWindow(); EnqueueCallback(() => { Assert.AreEqual(1, counter1); Assert.AreEqual(1, counter2); Assert.AreEqual(0, counter3); }); EnqueueTestComplete(); } #endif public enum EventToCommandType { AssociatedObject, SourceName, SourceObject } public class EventToCommandTestClass : EventToCommand { public EventToCommandType Type { get; set; } public int EventCount { get; set; } public object EventSender { get; set; } protected override void OnEvent(object sender, object eventArgs) { base.OnEvent(sender, eventArgs); EventCount++; if(SourceName != null) Type = EventToCommandType.SourceName; else if(Source == AssociatedObject) Type = EventToCommandType.AssociatedObject; else Type = EventToCommandType.SourceObject; EventSender = sender; } } public class EventArgsConverterTestClass : IEventArgsConverter { public int Count { get; set; } public object Convert(object sender, object args) { Count++; return null; } } } }
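// ---------------------------------------------------------------------------
// Distilled usage sketch (not one of the fixtures above): the minimal wiring the
// tests exercise repeatedly -- an EventToCommand with an EventName and a
// DelegateCommand attached to a control through Interaction.GetBehaviors(...).
// The namespace imports are assumed from the DevExpress.Mvvm packages; the
// helper name is illustrative.
using System.Windows.Controls;
using DevExpress.Mvvm;
using DevExpress.Mvvm.UI;
using DevExpress.Mvvm.UI.Interactivity;

static class EventToCommandUsageSketch
{
    public static EventToCommand AttachLoadedCommand(Button button)
    {
        int loadedCount = 0;
        var eventToCommand = new EventToCommand
        {
            EventName = "Loaded",                               // event to listen for
            Command = new DelegateCommand(() => loadedCount++), // executed when it fires
        };

        // Attaching the behavior subscribes to the event on the associated object
        // (or on SourceObject / SourceName when those are set, as the tests show).
        Interaction.GetBehaviors(button).Add(eventToCommand);
        return eventToCommand;
    }
}
// ---------------------------------------------------------------------------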
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Apis.ChromeUXReport.v1 { /// <summary>The ChromeUXReport Service.</summary> public class ChromeUXReportService : Google.Apis.Services.BaseClientService { /// <summary>The API version.</summary> public const string Version = "v1"; /// <summary>The discovery version used to generate this service.</summary> public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0; /// <summary>Constructs a new service.</summary> public ChromeUXReportService() : this(new Google.Apis.Services.BaseClientService.Initializer()) { } /// <summary>Constructs a new service.</summary> /// <param name="initializer">The service initializer.</param> public ChromeUXReportService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer) { Records = new RecordsResource(this); } /// <summary>Gets the service supported features.</summary> public override System.Collections.Generic.IList<string> Features => new string[0]; /// <summary>Gets the service name.</summary> public override string Name => "chromeuxreport"; /// <summary>Gets the service base URI.</summary> public override string BaseUri => #if NETSTANDARD1_3 || NETSTANDARD2_0 || NET45 BaseUriOverride ?? 
"https://chromeuxreport.googleapis.com/"; #else "https://chromeuxreport.googleapis.com/"; #endif /// <summary>Gets the service base path.</summary> public override string BasePath => ""; #if !NET40 /// <summary>Gets the batch base URI; <c>null</c> if unspecified.</summary> public override string BatchUri => "https://chromeuxreport.googleapis.com/batch"; /// <summary>Gets the batch base path; <c>null</c> if unspecified.</summary> public override string BatchPath => "batch"; #endif /// <summary>Gets the Records resource.</summary> public virtual RecordsResource Records { get; } } /// <summary>A base abstract class for ChromeUXReport requests.</summary> public abstract class ChromeUXReportBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse> { /// <summary>Constructs a new ChromeUXReportBaseServiceRequest instance.</summary> protected ChromeUXReportBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service) { } /// <summary>V1 error format.</summary> [Google.Apis.Util.RequestParameterAttribute("$.xgafv", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<XgafvEnum> Xgafv { get; set; } /// <summary>V1 error format.</summary> public enum XgafvEnum { /// <summary>v1 error format</summary> [Google.Apis.Util.StringValueAttribute("1")] Value1 = 0, /// <summary>v2 error format</summary> [Google.Apis.Util.StringValueAttribute("2")] Value2 = 1, } /// <summary>OAuth access token.</summary> [Google.Apis.Util.RequestParameterAttribute("access_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string AccessToken { get; set; } /// <summary>Data format for response.</summary> [Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<AltEnum> Alt { get; set; } /// <summary>Data format for response.</summary> public enum AltEnum { /// <summary>Responses with Content-Type of application/json</summary> [Google.Apis.Util.StringValueAttribute("json")] Json = 0, /// <summary>Media download with context-dependent Content-Type</summary> [Google.Apis.Util.StringValueAttribute("media")] Media = 1, /// <summary>Responses with Content-Type of application/x-protobuf</summary> [Google.Apis.Util.StringValueAttribute("proto")] Proto = 2, } /// <summary>JSONP</summary> [Google.Apis.Util.RequestParameterAttribute("callback", Google.Apis.Util.RequestParameterType.Query)] public virtual string Callback { get; set; } /// <summary>Selector specifying which fields to include in a partial response.</summary> [Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)] public virtual string Fields { get; set; } /// <summary> /// API key. Your API key identifies your project and provides you with API access, quota, and reports. Required /// unless you provide an OAuth 2.0 token. 
/// </summary> [Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)] public virtual string Key { get; set; } /// <summary>OAuth 2.0 token for the current user.</summary> [Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string OauthToken { get; set; } /// <summary>Returns response with indentations and line breaks.</summary> [Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<bool> PrettyPrint { get; set; } /// <summary> /// Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a /// user, but should not exceed 40 characters. /// </summary> [Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)] public virtual string QuotaUser { get; set; } /// <summary>Legacy upload protocol for media (e.g. "media", "multipart").</summary> [Google.Apis.Util.RequestParameterAttribute("uploadType", Google.Apis.Util.RequestParameterType.Query)] public virtual string UploadType { get; set; } /// <summary>Upload protocol for media (e.g. "raw", "multipart").</summary> [Google.Apis.Util.RequestParameterAttribute("upload_protocol", Google.Apis.Util.RequestParameterType.Query)] public virtual string UploadProtocol { get; set; } /// <summary>Initializes ChromeUXReport parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("$.xgafv", new Google.Apis.Discovery.Parameter { Name = "$.xgafv", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("access_token", new Google.Apis.Discovery.Parameter { Name = "access_token", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("alt", new Google.Apis.Discovery.Parameter { Name = "alt", IsRequired = false, ParameterType = "query", DefaultValue = "json", Pattern = null, }); RequestParameters.Add("callback", new Google.Apis.Discovery.Parameter { Name = "callback", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("fields", new Google.Apis.Discovery.Parameter { Name = "fields", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("key", new Google.Apis.Discovery.Parameter { Name = "key", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("oauth_token", new Google.Apis.Discovery.Parameter { Name = "oauth_token", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("prettyPrint", new Google.Apis.Discovery.Parameter { Name = "prettyPrint", IsRequired = false, ParameterType = "query", DefaultValue = "true", Pattern = null, }); RequestParameters.Add("quotaUser", new Google.Apis.Discovery.Parameter { Name = "quotaUser", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("uploadType", new Google.Apis.Discovery.Parameter { Name = "uploadType", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("upload_protocol", new Google.Apis.Discovery.Parameter { Name = "upload_protocol", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); } } /// <summary>The "records" 
collection of methods.</summary> public class RecordsResource { private const string Resource = "records"; /// <summary>The service which this resource belongs to.</summary> private readonly Google.Apis.Services.IClientService service; /// <summary>Constructs a new resource.</summary> public RecordsResource(Google.Apis.Services.IClientService service) { this.service = service; } /// <summary> /// Queries the Chrome User Experience for a single `record` for a given site. Returns a `record` that contains /// one or more `metrics` corresponding to performance data about the requested site. /// </summary> /// <param name="body">The body of the request.</param> public virtual QueryRecordRequest QueryRecord(Google.Apis.ChromeUXReport.v1.Data.QueryRequest body) { return new QueryRecordRequest(service, body); } /// <summary> /// Queries the Chrome User Experience for a single `record` for a given site. Returns a `record` that contains /// one or more `metrics` corresponding to performance data about the requested site. /// </summary> public class QueryRecordRequest : ChromeUXReportBaseServiceRequest<Google.Apis.ChromeUXReport.v1.Data.QueryResponse> { /// <summary>Constructs a new QueryRecord request.</summary> public QueryRecordRequest(Google.Apis.Services.IClientService service, Google.Apis.ChromeUXReport.v1.Data.QueryRequest body) : base(service) { Body = body; InitParameters(); } /// <summary>Gets or sets the body of this request.</summary> Google.Apis.ChromeUXReport.v1.Data.QueryRequest Body { get; set; } /// <summary>Returns the body of the request.</summary> protected override object GetBody() => Body; /// <summary>Gets the method name.</summary> public override string MethodName => "queryRecord"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "POST"; /// <summary>Gets the REST path.</summary> public override string RestPath => "v1/records:queryRecord"; /// <summary>Initializes QueryRecord parameter list.</summary> protected override void InitParameters() { base.InitParameters(); } } } } namespace Google.Apis.ChromeUXReport.v1.Data { /// <summary> /// A bin is a discrete portion of data spanning from start to end, or if no end is given, then from start to +inf. /// A bin's start and end values are given in the value type of the metric it represents. For example, "first /// contentful paint" is measured in milliseconds and exposed as ints, therefore its metric bins will use int32s for /// its start and end types. However, "cumulative layout shift" is measured in unitless decimals and is exposed as a /// decimal encoded as a string, therefore its metric bins will use strings for its value type. /// </summary> public class Bin : Google.Apis.Requests.IDirectResponseSchema { /// <summary>The proportion of users that experienced this bin's value for the given metric.</summary> [Newtonsoft.Json.JsonPropertyAttribute("density")] public virtual System.Nullable<double> Density { get; set; } /// <summary> /// End is the end of the data bin. If end is not populated, then the bin has no end and is valid from start to /// +inf. 
/// </summary> [Newtonsoft.Json.JsonPropertyAttribute("end")] public virtual object End { get; set; } /// <summary>Start is the beginning of the data bin.</summary> [Newtonsoft.Json.JsonPropertyAttribute("start")] public virtual object Start { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Key defines all the dimensions that identify this record as unique.</summary> public class Key : Google.Apis.Requests.IDirectResponseSchema { /// <summary> /// The effective connection type is the general connection class that all users experienced for this record. /// This field uses the values ["offline", "slow-2G", "2G", "3G", "4G"] as specified in: /// https://wicg.github.io/netinfo/#effective-connection-types If the effective connection type is unspecified, /// then aggregated data over all effective connection types will be returned. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("effectiveConnectionType")] public virtual string EffectiveConnectionType { get; set; } /// <summary> /// The form factor is the device class that all users used to access the site for this record. If the form /// factor is unspecified, then aggregated data over all form factors will be returned. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("formFactor")] public virtual string FormFactor { get; set; } /// <summary> /// Origin specifies the origin that this record is for. Note: When specifying an origin, data for loads under /// this origin over all pages are aggregated into origin level user experience data. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("origin")] public virtual string Origin { get; set; } /// <summary> /// Url specifies a specific url that this record is for. Note: When specifying a "url" only data for that /// specific url will be aggregated. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("url")] public virtual string Url { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary> /// A `metric` is a set of user experience data for a single web performance metric, like "first contentful paint". /// It contains a summary histogram of real world Chrome usage as a series of `bins`. /// </summary> public class Metric : Google.Apis.Requests.IDirectResponseSchema { /// <summary> /// The histogram of user experiences for a metric. The histogram will have at least one bin and the densities /// of all bins will add up to ~1. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("histogram")] public virtual System.Collections.Generic.IList<Bin> Histogram { get; set; } /// <summary> /// Common useful percentiles of the Metric. The value type for the percentiles will be the same as the value /// types given for the Histogram bins. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("percentiles")] public virtual Percentiles Percentiles { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary> /// Percentiles contains synthetic values of a metric at a given statistical percentile. These are used for /// estimating a metric's value as experienced by a percentage of users out of the total number of users. 
/// </summary> public class Percentiles : Google.Apis.Requests.IDirectResponseSchema { /// <summary>75% of users experienced the given metric at or below this value.</summary> [Newtonsoft.Json.JsonPropertyAttribute("p75")] public virtual object P75 { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary> /// Request payload sent by a physical web client. This request includes all necessary context to load a particular /// user experience record. /// </summary> public class QueryRequest : Google.Apis.Requests.IDirectResponseSchema { /// <summary> /// The effective connection type is a query dimension that specifies the effective network class that the /// record's data should belong to. This field uses the values ["offline", "slow-2G", "2G", "3G", "4G"] as /// specified in: https://wicg.github.io/netinfo/#effective-connection-types Note: If no effective connection /// type is specified, then a special record with aggregated data over all effective connection types will be /// returned. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("effectiveConnectionType")] public virtual string EffectiveConnectionType { get; set; } /// <summary> /// The form factor is a query dimension that specifies the device class that the record's data should belong /// to. Note: If no form factor is specified, then a special record with aggregated data over all form factors /// will be returned. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("formFactor")] public virtual string FormFactor { get; set; } /// <summary> /// The metrics that should be included in the response. If none are specified then any metrics found will be /// returned. Allowed values: ["first_contentful_paint", "first_input_delay", "largest_contentful_paint", /// "cumulative_layout_shift", "experimental_uncapped_cumulative_layout_shift"] /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("metrics")] public virtual System.Collections.Generic.IList<string> Metrics { get; set; } /// <summary> /// The url pattern "origin" refers to a url pattern that is the origin of a website. Examples: /// "https://example.com", "https://cloud.google.com" /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("origin")] public virtual string Origin { get; set; } /// <summary> /// The url pattern "url" refers to a url pattern that is any arbitrary url. Examples: "https://example.com/", /// "https://cloud.google.com/why-google-cloud/" /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("url")] public virtual string Url { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary> /// Response payload sent back to a physical web client. This response contains the record found based on the /// identifiers present in a `QueryRequest`. The returned response will have a record, and sometimes details on /// normalization actions taken on the request that were necessary to make the request successful. /// </summary> public class QueryResponse : Google.Apis.Requests.IDirectResponseSchema { /// <summary>The record that was found.</summary> [Newtonsoft.Json.JsonPropertyAttribute("record")] public virtual Record Record { get; set; } /// <summary> /// These are details about automated normalization actions that were taken in order to make the requested /// `url_pattern` valid.
/// </summary> [Newtonsoft.Json.JsonPropertyAttribute("urlNormalizationDetails")] public virtual UrlNormalization UrlNormalizationDetails { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary> /// Record is a single Chrome UX report data record. It contains use experience statistics for a single url pattern /// and set of dimensions. /// </summary> public class Record : Google.Apis.Requests.IDirectResponseSchema { /// <summary> /// Key defines all of the unique querying parameters needed to look up a user experience record. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("key")] public virtual Key Key { get; set; } /// <summary> /// Metrics is the map of user experience data available for the record defined in the key field. Metrics are /// keyed on the metric name. Allowed key values: ["first_contentful_paint", "first_input_delay", /// "largest_contentful_paint", "cumulative_layout_shift"] /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("metrics")] public virtual System.Collections.Generic.IDictionary<string, Metric> Metrics { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary> /// Object representing the normalization actions taken to normalize a url to achieve a higher chance of successful /// lookup. These are simple automated changes that are taken when looking up the provided `url_patten` would be /// known to fail. Complex actions like following redirects are not handled. /// </summary> public class UrlNormalization : Google.Apis.Requests.IDirectResponseSchema { /// <summary> /// The URL after any normalization actions. This is a valid user experience URL that could reasonably be looked /// up. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("normalizedUrl")] public virtual string NormalizedUrl { get; set; } /// <summary>The original requested URL prior to any normalization actions.</summary> [Newtonsoft.Json.JsonPropertyAttribute("originalUrl")] public virtual string OriginalUrl { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } }
// Licensed to the .NET Foundation under one or more agreements. // See the LICENSE file in the project root for more information // // XmlLicenseTransformTest.cs - Test Cases for XmlLicenseTransform // // Author: // original: // Sebastien Pouliot <[email protected]> // Aleksey Sanin ([email protected]) // this file: // Gert Driesen <[email protected]> // // (C) 2003 Aleksey Sanin ([email protected]) // (C) 2004 Novell (http://www.novell.com) // (C) 2008 Gert Driesen // // Licensed to the .NET Foundation under one or more agreements. // See the LICENSE file in the project root for more information. using System.IO; using System.Xml; using Xunit; namespace System.Security.Cryptography.Xml.Tests { public class UnprotectedXmlLicenseTransform : XmlLicenseTransform { public XmlNodeList UnprotectedGetInnerXml() { return base.GetInnerXml(); } } public class DummyDecryptor : IRelDecryptor { public string ContentToReturn { get; set; } public Stream Decrypt(EncryptionMethod encryptionMethod, KeyInfo keyInfo, Stream toDecrypt) { MemoryStream stream = new MemoryStream(); StreamWriter writer = new StreamWriter(stream); writer.Write(ContentToReturn); writer.Flush(); stream.Position = 0; return stream; } } public class XmlLicenseTransformTest { public const string LicenseTransformNsUrl = "urn:mpeg:mpeg21:2003:01-REL-R-NS"; public const string LicenseTransformUrl = LicenseTransformNsUrl + ":licenseTransform"; private UnprotectedXmlLicenseTransform transform; public XmlLicenseTransformTest() { transform = new UnprotectedXmlLicenseTransform(); } [Fact] // ctor () public void Constructor1() { Assert.Equal(LicenseTransformUrl, transform.Algorithm); Assert.Null(transform.Decryptor); Type[] input = transform.InputTypes; Assert.Equal(1, input.Length); Assert.Equal(typeof(XmlDocument), input[0]); Type[] output = transform.OutputTypes; Assert.Equal(1, output.Length); Assert.Equal(typeof(XmlDocument), output[0]); } [Fact] public void InputTypes() { // property does not return a clone transform.InputTypes[0] = null; Assert.Null(transform.InputTypes[0]); // it's not a static array transform = new UnprotectedXmlLicenseTransform(); Assert.NotNull(transform.InputTypes[0]); } [Fact] public void GetInnerXml() { XmlNodeList xnl = transform.UnprotectedGetInnerXml(); Assert.Null(xnl); } [Fact] public void OutputTypes() { // property does not return a clone transform.OutputTypes[0] = null; Assert.Null(transform.OutputTypes[0]); // it's not a static array transform = new UnprotectedXmlLicenseTransform(); Assert.NotNull(transform.OutputTypes[0]); } [Fact] public void Context_Null() { XmlDocument doc = GetDocumentFromResource("System.Security.Cryptography.Xml.Tests.XmlLicenseSample.xml"); Assert.Throws<CryptographicException>(() => transform.LoadInput(doc)); } [Fact] public void NoLicenseXml() { XmlDocument doc = new XmlDocument(); doc.LoadXml("<root />"); transform.Context = doc.DocumentElement; Assert.Throws<CryptographicException>(() => transform.LoadInput(doc)); } [Fact] public void Decryptor_Null() { XmlDocument doc = GetDocumentFromResource("System.Security.Cryptography.Xml.Tests.XmlLicenseSample.xml"); XmlNamespaceManager namespaceManager = new XmlNamespaceManager(doc.NameTable); namespaceManager.AddNamespace("r", "urn:mpeg:mpeg21:2003:01-REL-R-NS"); transform.Context = doc.DocumentElement.SelectSingleNode("//r:issuer[1]", namespaceManager) as XmlElement; Assert.Throws<CryptographicException>(() => transform.LoadInput(doc)); } [Fact] [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, 
"https://github.com/dotnet/corefx/issues/19410")] public void ValidLicense() { XmlDocument doc = GetDocumentFromResource("System.Security.Cryptography.Xml.Tests.XmlLicenseSample.xml"); XmlNamespaceManager namespaceManager = new XmlNamespaceManager(doc.NameTable); namespaceManager.AddNamespace("r", "urn:mpeg:mpeg21:2003:01-REL-R-NS"); transform.Context = doc.DocumentElement.SelectSingleNode("//r:issuer[1]", namespaceManager) as XmlElement; DummyDecryptor decryptor = new DummyDecryptor { ContentToReturn = "Encrypted Content" }; transform.Decryptor = decryptor; transform.LoadInput(doc); XmlDocument output = transform.GetOutput(typeof(XmlDocument)) as XmlDocument; string decodedXml = @"<r:license xmlns:r=""urn:mpeg:mpeg21:2003:01-REL-R-NS"" licenseId=""{00000000-0000-0000-0000-123456789012}"">"; decodedXml += "<r:title>Test License</r:title><r:grant>Encrypted Content</r:grant>"; decodedXml += "<r:issuer><r:details><r:timeOfIssue>2017-01-71T00:00:00Z</r:timeOfIssue></r:details></r:issuer></r:license>"; Assert.NotNull(output); Assert.Equal(decodedXml, output.OuterXml); } [Fact] public void GetOutput_InvalidType() { AssertExtensions.Throws<ArgumentException>("type", () => transform.GetOutput(typeof(string))); } [Fact] public static void ItDecryptsLicense() { using (var key = RSA.Create()) { string expected; string encryptedLicenseWithGrants = GenerateLicenseXmlWithEncryptedGrants(key, out expected); Assert.Contains("hello!", expected); Assert.DoesNotContain("hello!", encryptedLicenseWithGrants); XmlNamespaceManager nsManager; XmlDocument toDecrypt = LoadXmlWithLicenseNs(encryptedLicenseWithGrants, out nsManager); var decryptor = new XmlLicenseEncryptedRef(); var transform = new XmlLicenseTransform() { Decryptor = decryptor, Context = FindLicenseTransformContext(toDecrypt, nsManager) }; decryptor.AddAsymmetricKey(key); // Context is the input for this transform, argument is always ignored transform.LoadInput(null); XmlDocument decryptedDoc = transform.GetOutput() as XmlDocument; Assert.NotNull(decryptedDoc); string decrypted = decryptedDoc.OuterXml; Assert.Equal(expected, decrypted); } } private XmlDocument GetDocumentFromResource(string resourceName) { XmlDocument doc = new XmlDocument(); using (Stream stream = TestHelpers.LoadResourceStream(resourceName)) using (StreamReader streamReader = new StreamReader(stream)) { string originalXml = streamReader.ReadToEnd(); doc.LoadXml(originalXml); } return doc; } private static string GenerateLicenseXmlWithEncryptedGrants(RSA key, out string plainTextLicense) { plainTextLicense = @"<r:license xmlns:r=""urn:mpeg:mpeg21:2003:01-REL-R-NS""> <r:title>Test License</r:title> <r:grant> <r:forAll varName=""licensor"" /> <r:forAll varName=""property"" /> <r:forAll varName=""p0""> <r:propertyPossessor> <r:propertyAbstract varRef=""property"" /> </r:propertyPossessor> </r:forAll> <r:keyHolder varRef=""licensor"" /> <r:issue /> <r:grant> <r:principal varRef=""p0"" /> <x:bar xmlns:x=""urn:foo"" /> <r:digitalResource> <testItem>hello!</testItem> </r:digitalResource> <renderer xmlns=""urn:mpeg:mpeg21:2003:01-REL-MX-NS""> <mx:wildcard xmlns:mx=""urn:mpeg:mpeg21:2003:01-REL-MX-NS""> <r:anXmlExpression>some-xpath-expression</r:anXmlExpression> </mx:wildcard> <mx:wildcard xmlns:mx=""urn:mpeg:mpeg21:2003:01-REL-MX-NS""> <r:anXmlExpression>some-other-xpath-expression</r:anXmlExpression> </mx:wildcard> </renderer> </r:grant> <validityIntervalFloating xmlns=""urn:mpeg:mpeg21:2003:01-REL-SX-NS""> <sx:duration xmlns:sx=""urn:mpeg:mpeg21:2003:01-REL-SX-NS"">P2D</sx:duration> 
</validityIntervalFloating> </r:grant> <r:grant> <r:possessProperty /> <emailName xmlns=""urn:mpeg:mpeg21:2003:01-REL-SX-NS"">test@test</emailName> </r:grant> <r:issuer xmlns:r=""urn:mpeg:mpeg21:2003:01-REL-R-NS""> <r:details> <r:timeOfIssue>2099-11-11T11:11:11Z</r:timeOfIssue> </r:details> </r:issuer> </r:license>".Replace("\r\n", "\n"); XmlNamespaceManager nsManager; XmlDocument doc = LoadXmlWithLicenseNs(plainTextLicense, out nsManager); EncryptLicense(FindLicenseTransformContext(doc, nsManager), key); return doc.OuterXml; } private static XmlElement FindLicenseTransformContext(XmlDocument doc, XmlNamespaceManager nsManager) { XmlNodeList issuerList = doc.SelectNodes("//r:issuer", nsManager); return issuerList[0] as XmlElement; } private static XmlDocument LoadXmlWithLicenseNs(string xml, out XmlNamespaceManager nsManager) { XmlDocument doc = new XmlDocument(); doc.PreserveWhitespace = true; nsManager = new XmlNamespaceManager(doc.NameTable); nsManager.AddNamespace("r", LicenseTransformNsUrl); doc.LoadXml(xml); return doc; } private static void EncryptGrant(XmlNode grant, RSA key, XmlNamespaceManager nsMgr) { using (var ms = new MemoryStream()) using (var sw = new StreamWriter(ms)) { sw.Write(grant.InnerXml); sw.Flush(); ms.Position = 0; KeyInfo keyInfo; EncryptionMethod encryptionMethod; CipherData cipherData; XmlLicenseEncryptedRef.Encrypt(ms, key, out keyInfo, out encryptionMethod, out cipherData); grant.RemoveAll(); XmlDocument doc = grant.OwnerDocument; XmlElement encryptedGrant = doc.CreateElement("encryptedGrant", LicenseTransformNsUrl); grant.AppendChild(encryptedGrant); encryptedGrant.AppendChild(doc.ImportNode(keyInfo.GetXml(), true)); encryptedGrant.AppendChild(doc.ImportNode(encryptionMethod.GetXml(), true)); encryptedGrant.AppendChild(doc.ImportNode(cipherData.GetXml(), true)); } } private static void EncryptLicense(XmlElement context, RSA key) { XmlDocument doc = context.OwnerDocument; var nsMgr = new XmlNamespaceManager(doc.NameTable); nsMgr.AddNamespace("dsig", SignedXml.XmlDsigNamespaceUrl); nsMgr.AddNamespace("enc", EncryptedXml.XmlEncNamespaceUrl); nsMgr.AddNamespace("r", LicenseTransformNsUrl); XmlElement currentIssuerContext = context.SelectSingleNode("ancestor-or-self::r:issuer[1]", nsMgr) as XmlElement; Assert.NotNull(currentIssuerContext); XmlElement signatureNode = currentIssuerContext.SelectSingleNode("descendant-or-self::dsig:Signature[1]", nsMgr) as XmlElement; if (signatureNode != null) { signatureNode.ParentNode.RemoveChild(signatureNode); } XmlElement currentLicenseContext = currentIssuerContext.SelectSingleNode("ancestor-or-self::r:license[1]", nsMgr) as XmlElement; Assert.NotNull(currentLicenseContext); XmlNodeList issuerList = currentLicenseContext.SelectNodes("descendant-or-self::r:license[1]/r:issuer", nsMgr); for (int i = 0; i < issuerList.Count; i++) { XmlNode issuer = issuerList[i]; if (issuer == currentIssuerContext) { continue; } if (issuer.LocalName == "issuer" && issuer.NamespaceURI == LicenseTransformNsUrl) { issuer.ParentNode.RemoveChild(issuer); } } XmlNodeList encryptedGrantList = currentLicenseContext.SelectNodes("/r:license/r:grant", nsMgr); for (int i = 0; i < encryptedGrantList.Count; i++) { EncryptGrant(encryptedGrantList[i], key, nsMgr); } } } }
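// --- Illustrative usage sketch (not part of the test class above) ---
// A minimal sketch of the XmlLicenseTransform flow the tests exercise: point Context at an
// <r:issuer> element of the license, supply an IRelDecryptor, load the document, and read the
// decrypted license back as an XmlDocument. The license XML string and the decryptor
// implementation are assumed to be supplied by the caller.
using System.Security.Cryptography.Xml;
using System.Xml;

public static class XmlLicenseTransformUsageSketch
{
    public static XmlDocument DecryptLicense(string licenseXml, IRelDecryptor decryptor)
    {
        var doc = new XmlDocument { PreserveWhitespace = true };
        doc.LoadXml(licenseXml);

        var ns = new XmlNamespaceManager(doc.NameTable);
        ns.AddNamespace("r", "urn:mpeg:mpeg21:2003:01-REL-R-NS");

        var transform = new XmlLicenseTransform
        {
            // Context (not the LoadInput argument) is the effective input for this transform;
            // it must be an issuer element inside the license to decrypt.
            Context = (XmlElement)doc.SelectSingleNode("//r:issuer[1]", ns),
            Decryptor = decryptor,
        };

        transform.LoadInput(doc);
        return (XmlDocument)transform.GetOutput();
    }
}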
using System; using System.Data; using System.Drawing; using System.Xml; using Epi.Collections; using Epi; using Epi.Data; using Epi.Fields; using Epi.Data.Services; namespace Epi { #region Delegate Definition /// <summary> /// ChildViewRequestedEventArgs class /// </summary> public class ChildViewRequestedEventArgs : EventArgs { private View view = null; /// <summary> /// Constructor /// </summary> /// <param name="view">The view of an Epi.Project.</param> public ChildViewRequestedEventArgs(View view) { this.view = view; } /// <summary> /// The view of an Epi.Project. /// </summary> public View View { get { return this.view; } } } /// <summary> /// Delegate for handling the selection of a related view /// </summary> public delegate void ChildViewRequestedEventHandler(object sender, ChildViewRequestedEventArgs e); ///// <summary> ///// Delegate for handling the request of showing a field definition dialog ///// </summary> //public delegate void FieldDialogRequestHandler(object sender, FieldEventArgs e); /// <summary> /// ContextMenuEventArgs class /// </summary> public class ContextMenuEventArgs : EventArgs { private Page page = null; private int x = 0; private int y = 0; /// <summary> /// Constructor /// </summary> /// <param name="page">The page the context menu belongs to</param> /// <param name="x">The X-coordinate of the context menu.</param> /// <param name="y">The Y-coordinate of the context menu.</param> public ContextMenuEventArgs(Page page, int x, int y) { this.page = page; this.x = x; this.y = y; } /// <summary> /// Gets the page this context menu belongs to /// </summary> public Page Page { get { return this.page; } } /// <summary> /// Gets the X-coordinate of the context menu. /// </summary> public int X { get { return this.x; } } /// <summary> /// Gets the Y-coordinate of the context menu. /// </summary> public int Y { get { return this.y; } } } /// <summary> /// Delegate for handling the request of showing the page's context menu /// </summary> public delegate void ContextMenuRequestHandler(object sender, ContextMenuEventArgs e); #endregion /// <summary> /// A page in a view of a project.
/// </summary> public class Page : INamedObject { #region Private Class Members private int id; private NamedObjectCollection<GroupField> groupFields; private bool designMode; private string name = string.Empty; private int position = 0; private string checkCodeBefore = string.Empty; private string checkCodeAfter = string.Empty; private int backgroundId = 0; private bool flipLabelColor = false; /// <summary> /// view /// </summary> public View view = null; private XmlElement viewElement; #endregion Private Class Members #region Events /// <summary> /// Occurs when a related view is requested by a relate field /// </summary> public event ChildViewRequestedEventHandler ChildViewRequested; #endregion #region Constructors /// <summary> /// Default constructor - not used /// </summary> public Page() { } /// <summary> /// Constructs a page linked to a view /// </summary> /// <param name="view">A view object</param> public Page(View view) { this.view = view; } //public Page(View view, int pageId) /// <summary> /// Constructor /// </summary> /// <param name="view"></param> /// <param name="pageId"></param> public Page(View view, int pageId) { this.view = view; this.viewElement = view.ViewElement; this.name = GetPageName(view, pageId); this.id = pageId; } /// <summary> /// Constructs a page from database table row /// </summary> /// <param name="row"></param> /// <param name="view"></param> public Page(DataRow row, View view) : this(view) { if (row[ColumnNames.NAME] != DBNull.Value) this.Name = row[ColumnNames.NAME].ToString(); this.Id = (int)row[ColumnNames.PAGE_ID]; this.Position = (short)row[ColumnNames.POSITION]; this.CheckCodeBefore = row[ColumnNames.CHECK_CODE_BEFORE].ToString(); this.CheckCodeAfter = row[ColumnNames.CHECK_CODE_AFTER].ToString(); this.BackgroundId = (int)row[ColumnNames.BACKGROUND_ID]; } #endregion Constructors #region Public Properties /// <summary> /// Name of the page /// </summary> public string Name { get { if (string.IsNullOrEmpty(name)) { name = "New Page"; } return (name); } set { name = value; } } /// <summary> /// Database Table Name of the page /// </summary> public string TableName { get { if (this.view == null) { return null; } else { return this.view.TableName + this.id.ToString(); } } } /// <summary> /// Display name of the page /// </summary> public string DisplayName { get { return (view.DisplayName + "::" + this.Name); } } /// <summary> /// Position of the page in its view /// </summary> public int Position { get { return (position); } set { position = value; } } /// <summary> /// The value of the primary key, BackgroundId, in metaBackgrounds.
/// </summary> public int BackgroundId { get { return (backgroundId); } set { backgroundId = value; if (view != null) { DataTable table = this.GetMetadata().GetBackgroundData(); DataRow[] rows = table.Select(string.Format("{0} = {1}", ColumnNames.BACKGROUND_ID, value)); if (rows.Length > 0) { int color = (int)rows[0]["Color"]; byte a = (byte)(color >> 24); byte r = (byte)(color >> 16); byte g = (byte)(color >> 8); byte b = (byte)(color >> 0); if (r < 64 && g < 64 && b < 64 && a == 255) { flipLabelColor = true; } } } } } public bool FlipLabelColor { get { return flipLabelColor; } } /// <summary> /// Check code that executes after all the data is entered on the page /// </summary> public string CheckCodeAfter { get { return (checkCodeAfter); } set { checkCodeAfter = value; } } /// <summary> /// Check code that executes before the page is loaded for data entry /// </summary> public string CheckCodeBefore { get { return (checkCodeBefore); } set { checkCodeBefore = value; } } /// <summary> /// Gets/sets whether this page is in design mode or data entry mode. /// </summary> public bool DesignMode { get { return designMode; } set { designMode = value; } } /// <summary> /// Gets/sets the Id of the page /// </summary> public virtual int Id { get { return (id); } set { id = value; } } /// <summary> /// Returns a collection of all page's fields /// </summary> public NamedObjectCollection<RenderableField> Fields { get { NamedObjectCollection<RenderableField> pageFields = new NamedObjectCollection<RenderableField>(); FieldCollectionMaster fields = this.GetView().Fields; foreach (Field field in fields) { if (field is RenderableField) { RenderableField renderableField = (RenderableField)field; if (renderableField.Page != null) { if (renderableField.Page.Id == this.Id) { pageFields.Add(renderableField); } } } } return pageFields; } } /// <summary> /// Gets the Field Groups in a Named Object Collection /// </summary> public NamedObjectCollection<GroupField> GroupFields { get { if (groupFields == null) { if (!this.GetView().Project.MetadataSource.Equals(MetadataSource.Xml)) { groupFields = GetMetadata().GetGroupFields(this); } else { groupFields = new NamedObjectCollection<GroupField>(); } } return (groupFields); } } /// <summary> /// Gets field tab order information for the page /// </summary> public DataSets.TabOrders.TabOrderDataTable TabOrderForFields { get { return (GetMetadata().GetTabOrderForFields(this.Id)); } } #endregion Public Properties #region Private Properties #endregion Private Properties #region Static Methods /// <summary> /// Checks the name of a page to make sure the syntax is valid. 
/// </summary> /// <param name="projectName">The name of the page to validate</param> /// <param name="validationStatus">The message that is passed back to the calling method regarding the status of the validation attempt</param> /// <returns>Whether or not the name passed validation; true for a valid name, false for an invalid name</returns> public static bool IsValidPageName(string pageName, ref string validationStatus) { // assume valid by default bool valid = true; if (string.IsNullOrEmpty(pageName.Trim())) { validationStatus = SharedStrings.INVALID_PAGE_NAME_BLANK; valid = false; } else { for (int i = 0; i < pageName.Length; i++) { string viewChar = pageName.Substring(i, 1); System.Text.RegularExpressions.Match m = System.Text.RegularExpressions.Regex.Match(viewChar, "[A-Za-z0-9 .]"); if (!m.Success) { validationStatus = SharedStrings.INVALID_PAGE_NAME; valid = false; } } if (pageName.IndexOf(".") > -1)//EI-683 { validationStatus = SharedStrings.INVALID_PAGE_NAME; valid = false; } } return valid; } #endregion // Static Methods #region Public Methods /// <summary> /// Returns the parent view /// </summary> /// <returns>The parent view of an Epi.Project.</returns> public View GetView() { return view; } /// <summary> /// Creates field based on the meta field type. /// </summary> /// <param name="fieldType">Enumeration of Field Types.</param> /// <returns>New Field</returns> public Field CreateField(MetaFieldType fieldType) { switch (fieldType) { case MetaFieldType.Checkbox: return new CheckBoxField(this, viewElement); case MetaFieldType.CommandButton: return new CommandButtonField(this, viewElement); case MetaFieldType.Date: return new DateField(this, viewElement); case MetaFieldType.DateTime: return new DateTimeField(this, viewElement); case MetaFieldType.LegalValues: return new DDLFieldOfLegalValues(this, viewElement); case MetaFieldType.Codes: return new DDLFieldOfCodes(this, viewElement); case MetaFieldType.List: return new DDListField(this, viewElement); case MetaFieldType.CommentLegal: return new DDLFieldOfCommentLegal(this, viewElement); case MetaFieldType.Grid: return new GridField(this, viewElement); case MetaFieldType.Group: return new GroupField(this, viewElement); case MetaFieldType.GUID: return new GUIDField(this, viewElement); case MetaFieldType.Image: return new ImageField(this, viewElement); case MetaFieldType.LabelTitle: return new LabelField(this, viewElement); case MetaFieldType.Mirror: return new MirrorField(this, viewElement); case MetaFieldType.Multiline: return new MultilineTextField(this, viewElement); case MetaFieldType.Number: return new NumberField(this, viewElement); case MetaFieldType.Option: return new OptionField(this, viewElement); case MetaFieldType.PhoneNumber: return new PhoneNumberField(this, viewElement); case MetaFieldType.Relate: return new RelatedViewField(this, viewElement); case MetaFieldType.Text: return new SingleLineTextField(this, viewElement); case MetaFieldType.TextUppercase: return new UpperCaseTextField(this, viewElement); case MetaFieldType.Time: return new TimeField(this, viewElement); case MetaFieldType.YesNo: return new YesNoField(this, viewElement); default: return new SingleLineTextField(this, viewElement); } } public double MaxTabIndex { get { double maxTabIndex = 0; foreach (RenderableField field in this.Fields) { if (field.TabIndex > maxTabIndex) { maxTabIndex = field.TabIndex; } } return (maxTabIndex); } } /// <summary> /// Copy Page to another page. 
/// </summary> /// <param name="other">Destination page.</param> public void CopyTo(Page other) { other.Name = this.Name; other.Position = this.Position; other.CheckCodeBefore = this.CheckCodeBefore; other.CheckCodeAfter = this.CheckCodeAfter; } /// <summary> /// Implements IDisposable.Dispose() method /// </summary> public void Dispose() { } /// <summary> /// Returns the Metadata via a provider /// </summary> /// <returns>Metadata</returns> public IMetadataProvider GetMetadata() { return view.GetMetadata(); } /// <summary> /// Returns the project object /// </summary> /// <returns>Epi.Project</returns> public Project GetProject() { return view.GetProject(); } /// <summary> /// Save page to database. /// </summary> public void SaveToDb() { //if this is the first page, insert it if (this.Id == 0) { GetMetadata().InsertPage(this); } else { GetMetadata().UpdatePage(this); } } /// <summary> /// Adds a new field to the page /// </summary> /// <param name="field">Field to add</param> public void AddNewField(RenderableField field) { field.Page = this; if (!((field is MirrorField) || (field is LabelField))) { field.HasTabStop = true; field.TabIndex = MaxTabIndex + 1; } field.SaveToDb(); // Invalidate the current in-memory field collections and force the app to retreive // a fresh collection from the database whenever Page.Fields or Page.RenderableFields // is invoked. view.MustRefreshFieldCollection = true; } ///// <summary> ///// Adds a new group field to the page ///// </summary> ///// <param name="field">Field to add</param> //public void AddNewGroupField(FieldGroup field) //{ // field.Page = this; // view.MustRefreshFieldCollection = true; //} /// <summary> /// Updates Renderable Field. /// </summary> /// <param name="field">The field that is updated</param> public void UpdateField(RenderableField field) { if (!((field is MirrorField) || (field is LabelField))) { field.HasTabStop = true; field.TabIndex = MaxTabIndex + 1; } field.SaveToDb(); } /// <summary> /// Ensures that the page is currently in design mode. Throws an exception otherwise. /// </summary> public void AssertDesignMode() { if (!DesignMode) { throw new System.ApplicationException(SharedStrings.NOT_VALID_IN_FORM_DESIGN_MODE); } } /// <summary> /// Ensures that the page is currently in data entry mode. Throws an exception otherwise. /// </summary> public void AssertDataEntryMode() { if (DesignMode) { throw new System.ApplicationException(SharedStrings.NOT_VALID_IN_DATA_ENTRY_MODE); } } /// <summary> /// Deletes fields from a page /// </summary> public void DeleteFields() { this.GetMetadata().DeleteFields(this); } #endregion Public Methods #region Event Handlers private void Field_RelatedViewRequested(object sender, ChildViewRequestedEventArgs e) { if (ChildViewRequested != null) { ChildViewRequested(sender, e); } } #endregion #region Private Methods /// <summary> /// Get the page name for XML metadata view pages /// </summary> /// <param name="view">The view the page belongs to</param> /// <param name="pageId">The page id</param> /// <returns>Page name.</returns> private string GetPageName(View view, int pageId) { if (this.view.GetProject().MetadataSource.Equals(MetadataSource.Xml)) { XmlNode pagesNode = view.ViewElement.SelectSingleNode("Pages"); XmlNodeList pageNodeList = pagesNode.SelectNodes("//Page[@PageId= '" + pageId + "']"); if (pageNodeList != null) { foreach (XmlNode pageNode in pageNodeList) { this.name = pageNode.Attributes["Name"].Value; } } } return this.name; } #endregion //Private Methods } }
/* Project Orleans Cloud Service SDK ver. 1.0 Copyright (c) Microsoft Corporation All rights reserved. MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ using System; using System.Collections.Generic; using System.Net; using System.Net.Sockets; using Orleans.Messaging; namespace Orleans.Runtime.Messaging { internal class IncomingMessageAcceptor : AsynchAgent { private readonly IPEndPoint listenAddress; private Action<Message> sniffIncomingMessageHandler; internal readonly static string PingHeader = Message.Header.APPLICATION_HEADER_FLAG + Message.Header.PING_APPLICATION_HEADER; internal Socket AcceptingSocket; protected MessageCenter MessageCenter; protected HashSet<Socket> OpenReceiveSockets; public Action<Message> SniffIncomingMessage { set { if (sniffIncomingMessageHandler != null) throw new InvalidOperationException("IncomingMessageAcceptor SniffIncomingMessage already set"); sniffIncomingMessageHandler = value; } } private const int LISTEN_BACKLOG_SIZE = 1024; protected SocketDirection SocketDirection { get; private set; } // Used for holding enough info to handle receive completion internal IncomingMessageAcceptor(MessageCenter msgCtr, IPEndPoint here, SocketDirection socketDirection) { MessageCenter = msgCtr; listenAddress = here; if (here == null) listenAddress = MessageCenter.MyAddress.Endpoint; AcceptingSocket = SocketManager.GetAcceptingSocketForEndpoint(listenAddress); Log.Info(ErrorCode.Messaging_IMA_OpenedListeningSocket, "Opened a listening socket at address " + AcceptingSocket.LocalEndPoint); OpenReceiveSockets = new HashSet<Socket>(); OnFault = FaultBehavior.CrashOnFault; SocketDirection = socketDirection; } protected override void Run() { try { AcceptingSocket.Listen(LISTEN_BACKLOG_SIZE); AcceptingSocket.BeginAccept(new AsyncCallback(AcceptCallback), this); } catch (Exception ex) { Log.Error(ErrorCode.MessagingBeginAcceptSocketException, "Exception beginning accept on listening socket", ex); throw; } if (Log.IsVerbose3) Log.Verbose3("Started accepting connections."); } public override void Stop() { base.Stop(); if (Log.IsVerbose) Log.Verbose("Disconnecting the listening socket"); SocketManager.CloseSocket(AcceptingSocket); Socket[] temp; lock (Lockable) { temp = new Socket[OpenReceiveSockets.Count]; OpenReceiveSockets.CopyTo(temp); } foreach (var socket in temp) { SafeCloseSocket(socket); } lock (Lockable) { ClearSockets(); } } protected virtual bool RecordOpenedSocket(Socket sock) { Guid client; if (!ReceiveSocketPreample(sock, false, out client)) return false; 
NetworkingStatisticsGroup.OnOpenedReceiveSocket(); return true; } protected bool ReceiveSocketPreample(Socket sock, bool expectProxiedConnection, out Guid client) { client = default(Guid); if (Cts.IsCancellationRequested) return false; // Receive the client ID var buffer = new byte[16]; int offset = 0; while (offset < buffer.Length) { try { int bytesRead = sock.Receive(buffer, offset, buffer.Length - offset, SocketFlags.None); if (bytesRead == 0) { Log.Warn(ErrorCode.GatewayAcceptor_SocketClosed, "Remote socket closed while receiving client ID from endpoint {0}.", sock.RemoteEndPoint); return false; } offset += bytesRead; } catch (Exception ex) { Log.Warn(ErrorCode.GatewayAcceptor_ExceptionReceiving, "Exception receiving client ID from endpoint " + sock.RemoteEndPoint, ex); return false; } } client = new Guid(buffer); if (Log.IsVerbose2) Log.Verbose2(ErrorCode.MessageAcceptor_Connection, "Received connection from {0} at source address {1}", client, sock.RemoteEndPoint.ToString()); if (expectProxiedConnection) { // Proxied Gateway Connection - must have sender id if (client == SocketManager.SiloDirectConnectionId) { Log.Error(ErrorCode.MessageAcceptor_NotAProxiedConnection, string.Format("Gateway received unexpected non-proxied connection from {0} at source address {1}", client, sock.RemoteEndPoint)); return false; } } else { // Direct connection - should not have sender id if (client != SocketManager.SiloDirectConnectionId) { Log.Error(ErrorCode.MessageAcceptor_UnexpectedProxiedConnection, string.Format("Silo received unexpected proxied connection from {0} at source address {1}", client, sock.RemoteEndPoint)); return false; } } lock (Lockable) { OpenReceiveSockets.Add(sock); } return true; } protected virtual void RecordClosedSocket(Socket sock) { if (TryRemoveClosedSocket(sock)) NetworkingStatisticsGroup.OnClosedReceivingSocket(); } protected bool TryRemoveClosedSocket(Socket sock) { lock (Lockable) { return OpenReceiveSockets.Remove(sock); } } protected virtual void ClearSockets() { OpenReceiveSockets.Clear(); } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA2204:Literals should be spelled correctly", MessageId = "BeginAccept")] private static void AcceptCallback(IAsyncResult result) { var ima = result.AsyncState as IncomingMessageAcceptor; try { if (ima == null) { var logger = TraceLogger.GetLogger("IncomingMessageAcceptor", TraceLogger.LoggerType.Runtime); if (result.AsyncState == null) logger.Warn(ErrorCode.Messaging_IMA_AcceptCallbackNullState, "AcceptCallback invoked with a null unexpected async state"); else logger.Warn(ErrorCode.Messaging_IMA_AcceptCallbackUnexpectedState, "AcceptCallback invoked with an unexpected async state of type {0}", result.AsyncState.GetType()); return; } // First check to see if we're shutting down, in which case there's no point in doing anything other // than closing the accepting socket and returning. 
if (ima.Cts.IsCancellationRequested) { SocketManager.CloseSocket(ima.AcceptingSocket); ima.Log.Info(ErrorCode.Messaging_IMA_ClosingSocket, "Closing accepting socket during shutdown"); return; } // Then, start a new Accept try { ima.AcceptingSocket.BeginAccept(new AsyncCallback(AcceptCallback), ima); } catch (Exception ex) { ima.Log.Warn(ErrorCode.MessagingBeginAcceptSocketException, "Exception on accepting socket during BeginAccept", ex); // Open a new one ima.RestartAcceptingSocket(); } Socket sock; // Complete this accept try { sock = ima.AcceptingSocket.EndAccept(result); } catch (ObjectDisposedException) { // Socket was closed, but we're not shutting down; we need to open a new socket and start over... // Close the old socket and open a new one ima.Log.Warn(ErrorCode.MessagingAcceptingSocketClosed, "Accepting socket was closed when not shutting down"); ima.RestartAcceptingSocket(); return; } catch (Exception ex) { // There was a network error. We need to get a new accepting socket and re-issue an accept before we continue. // Close the old socket and open a new one ima.Log.Warn(ErrorCode.MessagingEndAcceptSocketException, "Exception on accepting socket during EndAccept", ex); ima.RestartAcceptingSocket(); return; } if (ima.Log.IsVerbose3) ima.Log.Verbose3("Received a connection from {0}", sock.RemoteEndPoint); // Finally, process the incoming request: // Prep the socket so it will reset on close sock.LingerState = new LingerOption(true, 0); // Add the socket to the open socket collection if (ima.RecordOpenedSocket(sock)) { // And set up the asynch receive var rcc = new ReceiveCallbackContext(sock, ima); try { rcc.BeginReceive(new AsyncCallback(ReceiveCallback)); } catch (Exception exception) { var socketException = exception as SocketException; ima.Log.Warn(ErrorCode.Messaging_IMA_NewBeginReceiveException, String.Format("Exception on new socket during BeginReceive with RemoteEndPoint {0}: {1}", socketException != null ? socketException.SocketErrorCode.ToString() : "", rcc.RemoteEndPoint), exception); ima.SafeCloseSocket(sock); } } else { ima.SafeCloseSocket(sock); } } catch (Exception ex) { var logger = ima != null ? ima.Log : TraceLogger.GetLogger("IncomingMessageAcceptor", TraceLogger.LoggerType.Runtime); logger.Error(ErrorCode.Messaging_IMA_ExceptionAccepting, "Unexpected exception in IncomingMessageAccepter.AcceptCallback", ex); } } private static void ReceiveCallback(IAsyncResult result) { var rcc = result.AsyncState as ReceiveCallbackContext; if (rcc == null) { // This should never happen. Trap it and drop it on the floor because allowing a null reference exception would // kill the process silently. return; } try { // First check to see if we're shutting down, in which case there's no point in doing anything other // than closing the accepting socket and returning. if (rcc.IMA.Cts.IsCancellationRequested) { // We're exiting, so close the socket and clean up rcc.IMA.SafeCloseSocket(rcc.Sock); } int bytes = 0; // Complete the receive try { bytes = rcc.Sock.EndReceive(result); } catch (ObjectDisposedException) { // The socket is closed. Just clean up and return. rcc.IMA.RecordClosedSocket(rcc.Sock); return; } catch (Exception ex) { rcc.IMA.Log.Warn(ErrorCode.Messaging_ExceptionReceiving, "Exception while completing a receive from " + rcc.Sock.RemoteEndPoint, ex); // Either there was a network error or the socket is being closed. Either way, just clean up and return. 
rcc.IMA.SafeCloseSocket(rcc.Sock); return; } //rcc.IMA.log.Verbose("Receive completed with " + bytes.ToString(CultureInfo.InvariantCulture) + " bytes"); if (bytes == 0) { // Socket was closed by the sender. so close our end rcc.IMA.SafeCloseSocket(rcc.Sock); // And we're done return; } // Process the buffer we received try { rcc.ProcessReceivedBuffer(bytes); } catch (Exception ex) { rcc.IMA.Log.Error(ErrorCode.Messaging_IMA_BadBufferReceived, String.Format("ProcessReceivedBuffer exception with RemoteEndPoint {0}: ", rcc.RemoteEndPoint), ex); // There was a problem with the buffer, presumably data corruption, so give up rcc.IMA.SafeCloseSocket(rcc.Sock); // And we're done return; } // Start the next receive. Note that if this throws, the exception will be logged in the catch below. rcc.BeginReceive(ReceiveCallback); } catch (Exception ex) { rcc.IMA.Log.Warn(ErrorCode.Messaging_IMA_DroppingConnection, "Exception receiving from end point " + rcc.RemoteEndPoint, ex); rcc.IMA.SafeCloseSocket(rcc.Sock); } } protected virtual void HandleMessage(Message msg, Socket receivedOnSocket) { if (Message.WriteMessagingTraces) msg.AddTimestamp(Message.LifecycleTag.ReceiveIncoming); // See it's a Ping message, and if so, short-circuit it if (msg.GetScalarHeader<bool>(PingHeader)) { MessagingStatisticsGroup.OnPingReceive(msg.SendingSilo); if (Log.IsVerbose2) Log.Verbose2("Responding to Ping from {0}", msg.SendingSilo); if (!msg.TargetSilo.Equals(MessageCenter.MyAddress)) // got ping that is not destined to me. For example, got a ping to my older incarnation. { MessagingStatisticsGroup.OnRejectedMessage(msg); Message rejection = msg.CreateRejectionResponse(Message.RejectionTypes.Unrecoverable, string.Format("The target silo is no longer active: target was {0}, but this silo is {1}. The rejected ping message is {2}.", msg.TargetSilo.ToLongString(), MessageCenter.MyAddress.ToLongString(), msg.ToString())); MessageCenter.OutboundQueue.SendMessage(rejection); } else { var response = msg.CreateResponseMessage(); response.BodyObject = Response.Done; MessageCenter.SendMessage(response); } return; } // sniff message headers for directory cache management if (sniffIncomingMessageHandler != null) sniffIncomingMessageHandler(msg); // Don't process messages that have already timed out if (msg.IsExpired) { msg.DropExpiredMessage(MessagingStatisticsGroup.Phase.Receive); return; } // If we've stopped application message processing, then filter those out now // Note that if we identify or add other grains that are required for proper stopping, we will need to treat them as we do the membership table grain here. if (MessageCenter.IsBlockingApplicationMessages && (msg.Category == Message.Categories.Application) && (msg.SendingGrain != Constants.SystemMembershipTableId)) { // We reject new requests, and drop all other messages if (msg.Direction != Message.Directions.Request) return; MessagingStatisticsGroup.OnRejectedMessage(msg); var reject = msg.CreateRejectionResponse(Message.RejectionTypes.Unrecoverable, "Silo stopping"); MessageCenter.SendMessage(reject); return; } // Make sure the message is for us. Note that some control messages may have no target // information, so a null target silo is OK. if ((msg.TargetSilo == null) || msg.TargetSilo.Matches(MessageCenter.MyAddress)) { // See if it's a message for a client we're proxying. 
if (MessageCenter.IsProxying && MessageCenter.TryDeliverToProxy(msg)) return; // Nope, it's for us MessageCenter.InboundQueue.PostMessage(msg); return; } if (!msg.TargetSilo.Endpoint.Equals(MessageCenter.MyAddress.Endpoint)) { // If the message is for some other silo altogether, then we need to forward it. if (Log.IsVerbose2) Log.Verbose2("Forwarding message {0} from {1} to silo {2}", msg.Id, msg.SendingSilo, msg.TargetSilo); if (Message.WriteMessagingTraces) msg.AddTimestamp(Message.LifecycleTag.EnqueueForForwarding); MessageCenter.OutboundQueue.SendMessage(msg); return; } // If the message was for this endpoint but an older epoch, then reject the message // (if it was a request), or drop it on the floor if it was a response or one-way. if (msg.Direction == Message.Directions.Request) { MessagingStatisticsGroup.OnRejectedMessage(msg); Message rejection = msg.CreateRejectionResponse(Message.RejectionTypes.Transient, string.Format("The target silo is no longer active: target was {0}, but this silo is {1}. The rejected message is {2}.", msg.TargetSilo.ToLongString(), MessageCenter.MyAddress.ToLongString(), msg.ToString())); MessageCenter.OutboundQueue.SendMessage(rejection); if (Log.IsVerbose) Log.Verbose("Rejecting an obsolete request; target was {0}, but this silo is {1}. The rejected message is {2}.", msg.TargetSilo.ToLongString(), MessageCenter.MyAddress.ToLongString(), msg.ToString()); } } private void RestartAcceptingSocket() { try { SocketManager.CloseSocket(AcceptingSocket); AcceptingSocket = SocketManager.GetAcceptingSocketForEndpoint(listenAddress); AcceptingSocket.Listen(LISTEN_BACKLOG_SIZE); AcceptingSocket.BeginAccept(new AsyncCallback(AcceptCallback), this); } catch (Exception ex) { Log.Error(ErrorCode.Runtime_Error_100016, "Unable to create a new accepting socket", ex); throw; } } private void SafeCloseSocket(Socket sock) { RecordClosedSocket(sock); SocketManager.CloseSocket(sock); } private class ReceiveCallbackContext { internal enum ReceivePhase { Lengths, Header, Body, MetaHeader, HeaderBodies } private ReceivePhase phase; private byte[] lengthBuffer; private readonly byte[] metaHeaderBuffer; private List<ArraySegment<byte>> lengths; private List<ArraySegment<byte>> header; private List<ArraySegment<byte>> body; private readonly List<ArraySegment<byte>> metaHeader; private List<ArraySegment<byte>> headerBodies; private int headerLength; private int bodyLength; private int[] headerLengths; private int[] bodyLengths; private int headerBodiesLength; private int offset; private readonly bool batchingMode; private int numberOfMessages; public Socket Sock { get; private set; } public EndPoint RemoteEndPoint { get; private set; } public IncomingMessageAcceptor IMA { get; private set; } private List<ArraySegment<byte>> CurrentBuffer { get { if (batchingMode) { switch (phase) { case ReceivePhase.MetaHeader: return metaHeader; case ReceivePhase.Lengths: return lengths; default: return headerBodies; } } switch (phase) { case ReceivePhase.Lengths: return lengths; case ReceivePhase.Header: return header; default: return body; } } } private int CurrentLength { get { if (batchingMode) { switch (phase) { case ReceivePhase.MetaHeader: return Message.LENGTH_META_HEADER; case ReceivePhase.Lengths: if (numberOfMessages == 0) { IMA.Log.Info("Error: numberOfMessages must NOT be 0 here."); return 0; } return Message.LENGTH_HEADER_SIZE * numberOfMessages; default: return headerBodiesLength; } } switch (phase) { case ReceivePhase.Lengths: return Message.LENGTH_HEADER_SIZE; case 
ReceivePhase.Header: return headerLength; default: return bodyLength; } } } public ReceiveCallbackContext(Socket sock, IncomingMessageAcceptor ima) { batchingMode = ima.MessageCenter.MessagingConfiguration.UseMessageBatching; if (batchingMode) { phase = ReceivePhase.MetaHeader; Sock = sock; RemoteEndPoint = sock.RemoteEndPoint; IMA = ima; metaHeaderBuffer = new byte[Message.LENGTH_META_HEADER]; metaHeader = new List<ArraySegment<byte>>() { new ArraySegment<byte>(metaHeaderBuffer) }; // LengthBuffer and Lengths cannot be allocated here because the sizes varies in response to the number of received messages lengthBuffer = null; lengths = null; header = null; body = null; headerBodies = null; headerLengths = null; bodyLengths = null; headerBodiesLength = 0; numberOfMessages = 0; offset = 0; } else { phase = ReceivePhase.Lengths; Sock = sock; RemoteEndPoint = sock.RemoteEndPoint; IMA = ima; lengthBuffer = new byte[Message.LENGTH_HEADER_SIZE]; lengths = new List<ArraySegment<byte>>() { new ArraySegment<byte>(lengthBuffer) }; header = null; body = null; headerLength = 0; bodyLength = 0; offset = 0; } } private void Reset() { if (batchingMode) { phase = ReceivePhase.MetaHeader; // MetaHeader MUST NOT set to null because it will be re-used. lengthBuffer = null; lengths = null; header = null; body = null; headerLengths = null; bodyLengths = null; headerBodies = null; headerBodiesLength = 0; numberOfMessages = 0; offset = 0; } else { phase = ReceivePhase.Lengths; headerLength = 0; bodyLength = 0; offset = 0; header = null; body = null; } } // Builds the list of buffer segments to pass to Socket.BeginReceive, based on the total list (CurrentBuffer) // and how much we've already filled in (Offset). We have to do this because the scatter/gather variant of // the BeginReceive API doesn't allow you to specify an offset into the list of segments. // To build the list, we walk through the complete buffer, skipping segments that we've already filled up; // add the partial segment for whatever's left in the first unfilled buffer, and then add any remaining buffers. private List<ArraySegment<byte>> BuildSegmentList() { return ByteArrayBuilder.BuildSegmentList(CurrentBuffer, offset); } public void BeginReceive(AsyncCallback callback) { try { Sock.BeginReceive(BuildSegmentList(), SocketFlags.None, callback, this); } catch (Exception ex) { IMA.Log.Warn(ErrorCode.MessagingBeginReceiveException, "Exception trying to begin receive from endpoint " + RemoteEndPoint, ex); throw; } } #if TRACK_DETAILED_STATS // Global collection of ThreadTrackingStatistic for thread pool and IO completion threads. public static readonly System.Collections.Concurrent.ConcurrentDictionary<int, ThreadTrackingStatistic> trackers = new System.Collections.Concurrent.ConcurrentDictionary<int, ThreadTrackingStatistic>(); #endif public void ProcessReceivedBuffer(int bytes) { offset += bytes; if (offset < CurrentLength) return; // Nothing to do except start the next receive #if TRACK_DETAILED_STATS ThreadTrackingStatistic tracker = null; if (StatisticsCollector.CollectThreadTimeTrackingStats) { int id = System.Threading.Thread.CurrentThread.ManagedThreadId; if (!trackers.TryGetValue(id, out tracker)) { tracker = new ThreadTrackingStatistic("ThreadPoolThread." 
+ System.Threading.Thread.CurrentThread.ManagedThreadId); bool added = trackers.TryAdd(id, tracker); if (added) { tracker.OnStartExecution(); } } tracker.OnStartProcessing(); } #endif try { if (batchingMode) { switch (phase) { case ReceivePhase.MetaHeader: numberOfMessages = BitConverter.ToInt32(metaHeaderBuffer, 0); lengthBuffer = new byte[numberOfMessages * Message.LENGTH_HEADER_SIZE]; lengths = new List<ArraySegment<byte>>() { new ArraySegment<byte>(lengthBuffer) }; phase = ReceivePhase.Lengths; offset = 0; break; case ReceivePhase.Lengths: headerBodies = new List<ArraySegment<byte>>(); headerLengths = new int[numberOfMessages]; bodyLengths = new int[numberOfMessages]; for (int i = 0; i < numberOfMessages; i++) { headerLengths[i] = BitConverter.ToInt32(lengthBuffer, i * 8); bodyLengths[i] = BitConverter.ToInt32(lengthBuffer, i * 8 + 4); headerBodiesLength += (headerLengths[i] + bodyLengths[i]); // We need to set the boundary of ArraySegment<byte>s to the same as the header/body boundary headerBodies.AddRange(BufferPool.GlobalPool.GetMultiBuffer(headerLengths[i])); headerBodies.AddRange(BufferPool.GlobalPool.GetMultiBuffer(bodyLengths[i])); } phase = ReceivePhase.HeaderBodies; offset = 0; break; case ReceivePhase.HeaderBodies: int lengtshSoFar = 0; for (int i = 0; i < numberOfMessages; i++) { header = ByteArrayBuilder.BuildSegmentListWithLengthLimit(headerBodies, lengtshSoFar, headerLengths[i]); body = ByteArrayBuilder.BuildSegmentListWithLengthLimit(headerBodies, lengtshSoFar + headerLengths[i], bodyLengths[i]); lengtshSoFar += (headerLengths[i] + bodyLengths[i]); var msg = new Message(header, body); MessagingStatisticsGroup.OnMessageReceive(msg, headerLengths[i], bodyLengths[i]); if (IMA.Log.IsVerbose3) IMA.Log.Verbose3("Received a complete message of {0} bytes from {1}", headerLengths[i] + bodyLengths[i], msg.SendingAddress); if (headerLengths[i] + bodyLengths[i] > Message.LargeMessageSizeThreshold) { IMA.Log.Info(ErrorCode.Messaging_LargeMsg_Incoming, "Receiving large message Size={0} HeaderLength={1} BodyLength={2}. Msg={3}", headerLengths[i] + bodyLengths[i], headerLengths[i], bodyLengths[i], msg.ToString()); if (IMA.Log.IsVerbose3) IMA.Log.Verbose3("Received large message {0}", msg.ToLongString()); } IMA.HandleMessage(msg, Sock); } MessagingStatisticsGroup.OnMessageBatchReceive(IMA.SocketDirection, numberOfMessages, lengtshSoFar); Reset(); break; } } else { // We've completed a buffer. What we do depends on which phase we were in switch (phase) { case ReceivePhase.Lengths: // Pull out the header and body lengths headerLength = BitConverter.ToInt32(lengthBuffer, 0); bodyLength = BitConverter.ToInt32(lengthBuffer, 4); header = BufferPool.GlobalPool.GetMultiBuffer(headerLength); body = BufferPool.GlobalPool.GetMultiBuffer(bodyLength); phase = ReceivePhase.Header; offset = 0; break; case ReceivePhase.Header: phase = ReceivePhase.Body; offset = 0; break; case ReceivePhase.Body: var msg = new Message(header, body); MessagingStatisticsGroup.OnMessageReceive(msg, headerLength, bodyLength); if (IMA.Log.IsVerbose3) IMA.Log.Verbose3("Received a complete message of {0} bytes from {1}", headerLength + bodyLength, msg.SendingAddress); if (headerLength + bodyLength > Message.LargeMessageSizeThreshold) { IMA.Log.Info(ErrorCode.Messaging_LargeMsg_Incoming, "Receiving large message Size={0} HeaderLength={1} BodyLength={2}. 
Msg={3}", headerLength + bodyLength, headerLength, bodyLength, msg.ToString()); if (IMA.Log.IsVerbose3) IMA.Log.Verbose3("Received large message {0}", msg.ToLongString()); } IMA.HandleMessage(msg, Sock); Reset(); break; } } } catch (Exception exc) { try { // Log details of receive state machine IMA.Log.Error(ErrorCode.MessagingProcessReceiveBufferException, string.Format( "Exception trying to process {0} bytes from endpoint {1} at offset {2} in phase {3}" + " CurrentLength={4} HeaderLength={5} BodyLength={6}", bytes, RemoteEndPoint, offset, phase, CurrentLength, headerLength, bodyLength ), exc); } catch (Exception) { } Reset(); // Reset back to a hopefully good base state throw; } finally { #if TRACK_DETAILED_STATS if (StatisticsCollector.CollectThreadTimeTrackingStats) { tracker.IncrementNumberOfProcessed(); tracker.OnStopProcessing(); } #endif } } } } } 
// // QueryNode.cs // // Author: // Aaron Bockover <[email protected]> // Gabriel Burt <[email protected]> // // Copyright (C) 2007-2008 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections.Generic; using System.Xml; using System.IO; using System.Text; namespace Hyena.Query { public enum QueryNodeSearchMethod { DepthFirst, BreadthFirst } public abstract class QueryNode { private QueryListNode parent; private int source_column; private int source_line; public QueryNode() { } public QueryNode(QueryListNode parent) { Parent = parent; Parent.AddChild(this); } protected void PrintIndent(int depth) { Console.Write(String.Empty.PadLeft(depth * 2, ' ')); } public void Dump() { Dump(0); } internal virtual void Dump(int depth) { PrintIndent(depth); Console.WriteLine(this); } public abstract QueryNode Trim (); public string ToUserQuery () { StringBuilder sb = new StringBuilder (); AppendUserQuery (sb); return sb.ToString (); } public abstract void AppendUserQuery (StringBuilder sb); public string ToXml (QueryFieldSet fieldSet) { return ToXml (fieldSet, false); } public virtual string ToXml (QueryFieldSet fieldSet, bool pretty) { XmlDocument doc = new XmlDocument (); XmlElement request = doc.CreateElement ("request"); doc.AppendChild (request); XmlElement query = doc.CreateElement ("query"); query.SetAttribute ("banshee-version", "1"); request.AppendChild (query); AppendXml (doc, query, fieldSet); if (!pretty) { return doc.OuterXml; } using (StringWriter sw = new StringWriter ()) { using (XmlTextWriter xtw = new XmlTextWriter (sw)) { xtw.Formatting = System.Xml.Formatting.Indented; xtw.Indentation = 2; doc.WriteTo (xtw); return sw.ToString (); } } } public IEnumerable<T> SearchForValues<T> () where T : QueryValue { return SearchForValues<T> (QueryNodeSearchMethod.DepthFirst); } public IEnumerable<T> SearchForValues<T> (QueryNodeSearchMethod method) where T : QueryValue { if (method == QueryNodeSearchMethod.DepthFirst) { return SearchForValuesByDepth<T> (this); } else { return SearchForValuesByBreadth<T> (); } } private static IEnumerable<T> SearchForValuesByDepth<T> (QueryNode node) where T : QueryValue { QueryListNode list = node as QueryListNode; if (list != null) { foreach (QueryNode child in list.Children) { foreach (T item in SearchForValuesByDepth<T> (child)) { yield return item; } } } else { QueryTermNode term = node as QueryTermNode; if (term != null) { T value = term.Value as T; if (value != null) { yield return value; } } } } 
        private IEnumerable<T> SearchForValuesByBreadth<T> () where T : QueryValue
        {
            Queue<QueryNode> queue = new Queue<QueryNode> ();
            queue.Enqueue (this);
            do {
                QueryNode node = queue.Dequeue ();
                QueryListNode list = node as QueryListNode;
                if (list != null) {
                    foreach (QueryNode child in list.Children) {
                        queue.Enqueue (child);
                    }
                } else {
                    QueryTermNode term = node as QueryTermNode;
                    if (term != null) {
                        T value = term.Value as T;
                        if (value != null) {
                            yield return value;
                        }
                    }
                }
            } while (queue.Count > 0);
        }

        public IEnumerable<QueryField> GetFields ()
        {
            foreach (QueryTermNode term in GetTerms ())
                yield return term.Field;
        }

        public IEnumerable<QueryTermNode> GetTerms ()
        {
            Queue<QueryNode> queue = new Queue<QueryNode> ();
            queue.Enqueue (this);
            do {
                QueryNode node = queue.Dequeue ();
                QueryListNode list = node as QueryListNode;
                if (list != null) {
                    foreach (QueryNode child in list.Children) {
                        queue.Enqueue (child);
                    }
                } else {
                    QueryTermNode term = node as QueryTermNode;
                    if (term != null) {
                        yield return term;
                    }
                }
            } while (queue.Count > 0);
        }

        public override string ToString ()
        {
            return ToUserQuery ();
        }

        public abstract void AppendXml (XmlDocument doc, XmlNode parent, QueryFieldSet fieldSet);

        public virtual string ToSql (QueryFieldSet fieldSet)
        {
            StringBuilder sb = new StringBuilder ();
            AppendSql (sb, fieldSet);
            return sb.ToString ();
        }

        public abstract void AppendSql (StringBuilder sb, QueryFieldSet fieldSet);

        public QueryListNode Parent {
            get { return parent; }
            set { parent = value; }
        }

        public int SourceColumn {
            get { return source_column; }
            set { source_column = value; }
        }

        public int SourceLine {
            get { return source_line; }
            set { source_line = value; }
        }
    }
}
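// Illustrative sketch only (not part of Hyena.Query): a minimal leaf node showing what a
// concrete QueryNode subclass has to provide -- Trim, AppendUserQuery, AppendXml and
// AppendSql. The LiteralQueryNode name and its rendering are hypothetical; QueryFieldSet
// comes from the surrounding Hyena.Query assembly.
using System.Text;
using System.Xml;

namespace Hyena.Query
{
    public class LiteralQueryNode : QueryNode
    {
        private readonly string text;

        public LiteralQueryNode (string text)
        {
            this.text = text;
        }

        // A leaf node has nothing to prune away.
        public override QueryNode Trim ()
        {
            return this;
        }

        public override void AppendUserQuery (StringBuilder sb)
        {
            sb.Append (text);
        }

        public override void AppendXml (XmlDocument doc, XmlNode parent, QueryFieldSet fieldSet)
        {
            XmlElement element = doc.CreateElement ("literal");
            element.InnerText = text;
            parent.AppendChild (element);
        }

        public override void AppendSql (StringBuilder sb, QueryFieldSet fieldSet)
        {
            // Naive single-quote escaping; a real node would defer to the query framework.
            sb.Append ("'").Append (text.Replace ("'", "''")).Append ("'");
        }
    }
}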
//----------------------------------------------------------------------- // <copyright file="Shape.cs" company="Akka.NET Project"> // Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com> // Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net> // </copyright> //----------------------------------------------------------------------- using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Linq; using Akka.Streams.Implementation; namespace Akka.Streams { /// <summary> /// An input port of a <see cref="IModule"/>. This type logically belongs /// into the impl package but must live here due to how sealed works. /// It is also used in the Java DSL for "untyped Inlets" as a work-around /// for otherwise unreasonable existential types. /// </summary> public abstract class InPort { /// <summary> /// TBD /// </summary> internal int Id = -1; } /// <summary> /// An output port of a StreamLayout.Module. This type logically belongs /// into the impl package but must live here due to how sealed works. /// It is also used in the Java DSL for "untyped Outlets" as a work-around /// for otherwise unreasonable existential types. /// </summary> public abstract class OutPort { /// <summary> /// TBD /// </summary> internal int Id = -1; } /// <summary> /// An Inlet is a typed input to a Shape. Its partner in the Module view /// is the InPort(which does not bear an element type because Modules only /// express the internal structural hierarchy of stream topologies). /// </summary> public abstract class Inlet : InPort { /// <summary> /// TBD /// </summary> /// <typeparam name="T">TBD</typeparam> /// <param name="inlet">TBD</param> /// <returns>TBD</returns> public static Inlet<T> Create<T>(Inlet inlet) => inlet as Inlet<T> ?? new Inlet<T>(inlet.Name); /// <summary> /// TBD /// </summary> /// <param name="name">TBD</param> /// <exception cref="ArgumentException"> /// This exception is thrown when the specified <paramref name="name"/> is undefined. /// </exception> protected Inlet(string name) => Name = name ?? throw new ArgumentException("Inlet name must be defined"); /// <summary> /// TBD /// </summary> public readonly string Name; /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public abstract Inlet CarbonCopy(); /// <inheritdoc/> public sealed override string ToString() => Name; } /// <summary> /// TBD /// </summary> /// <typeparam name="T">TBD</typeparam> public sealed class Inlet<T> : Inlet { /// <summary> /// TBD /// </summary> /// <param name="name">TBD</param> public Inlet(string name) : base(name) { } /// <summary> /// TBD /// </summary> /// <typeparam name="TOther">TBD</typeparam> /// <returns>TBD</returns> internal Inlet<TOther> As<TOther>() => Create<TOther>(this); /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override Inlet CarbonCopy() => new Inlet<T>(Name); } /// <summary> /// An Outlet is a typed output to a Shape. Its partner in the Module view /// is the OutPort(which does not bear an element type because Modules only /// express the internal structural hierarchy of stream topologies). /// </summary> public abstract class Outlet : OutPort { /// <summary> /// TBD /// </summary> /// <typeparam name="T">TBD</typeparam> /// <param name="outlet">TBD</param> /// <returns>TBD</returns> public static Outlet<T> Create<T>(Outlet outlet) => outlet as Outlet<T> ?? 
new Outlet<T>(outlet.Name); /// <summary> /// TBD /// </summary> /// <param name="name">TBD</param> protected Outlet(string name) => Name = name; /// <summary> /// TBD /// </summary> public readonly string Name; /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public abstract Outlet CarbonCopy(); /// <inheritdoc/> public sealed override string ToString() => Name; } /// <summary> /// TBD /// </summary> public sealed class Outlet<T> : Outlet { /// <summary> /// TBD /// </summary> /// <param name="name">TBD</param> public Outlet(string name) : base(name) { } /// <summary> /// TBD /// </summary> /// <typeparam name="TOther">TBD</typeparam> /// <returns>TBD</returns> internal Outlet<TOther> As<TOther>() => Create<TOther>(this); /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override Outlet CarbonCopy() => new Outlet<T>(Name); } /// <summary> /// A Shape describes the inlets and outlets of a <see cref="IGraph{TShape}"/>. In keeping with the /// philosophy that a Graph is a freely reusable blueprint, everything that /// matters from the outside are the connections that can be made with it, /// otherwise it is just a black box. /// </summary> public abstract class Shape #if CLONEABLE : ICloneable #endif { /// <summary> /// Gets list of all input ports. /// </summary> public abstract ImmutableArray<Inlet> Inlets { get; } /// <summary> /// Gets list of all output ports. /// </summary> public abstract ImmutableArray<Outlet> Outlets { get; } /// <summary> /// Create a copy of this Shape object, returning the same type as the /// original; this constraint can unfortunately not be expressed in the /// type system. /// </summary> /// <returns>TBD</returns> public abstract Shape DeepCopy(); /// <summary> /// Create a copy of this Shape object, returning the same type as the /// original but containing the ports given within the passed-in Shape. /// </summary> /// <param name="inlets">TBD</param> /// <param name="outlets">TBD</param> /// <returns>TBD</returns> public abstract Shape CopyFromPorts(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets); /// <summary> /// Compare this to another shape and determine whether the set of ports is the same (ignoring their ordering). /// </summary> /// <param name="shape">TBD</param> /// <returns>TBD</returns> public bool HasSamePortsAs(Shape shape) { var inlets = new HashSet<Inlet>(Inlets); var outlets = new HashSet<Outlet>(Outlets); return inlets.SetEquals(shape.Inlets) && outlets.SetEquals(shape.Outlets); } /// <summary> /// Compare this to another shape and determine whether the arrangement of ports is the same (including their ordering). /// </summary> /// <param name="shape">TBD</param> /// <returns>TBD</returns> public bool HasSamePortsAndShapeAs(Shape shape) => Inlets.Equals(shape.Inlets) && Outlets.Equals(shape.Outlets); /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public object Clone() => DeepCopy(); /// <inheritdoc/> public sealed override string ToString() => $"{GetType().Name}([{string.Join(", ", Inlets)}] [{string.Join(", ", Outlets)}])"; } /// <summary> /// This <see cref="Shape"/> is used for graphs that have neither open inputs nor open /// outputs. Only such a <see cref="IGraph{TShape,TMaterializer}"/> can be materialized by a <see cref="IMaterializer"/>. 
/// </summary> public class ClosedShape : Shape { /// <summary> /// TBD /// </summary> public static readonly ClosedShape Instance = new ClosedShape(); private ClosedShape() { } /// <summary> /// TBD /// </summary> public override ImmutableArray<Inlet> Inlets => ImmutableArray<Inlet>.Empty; /// <summary> /// TBD /// </summary> public override ImmutableArray<Outlet> Outlets => ImmutableArray<Outlet>.Empty; /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override Shape DeepCopy() => this; /// <summary> /// TBD /// </summary> /// <param name="inlets">TBD</param> /// <param name="outlets">TBD</param> /// <exception cref="ArgumentException"> /// This exception is thrown when the size of the specified <paramref name="inlets"/> array is zero /// or the size of the specified <paramref name="outlets"/> array is zero. /// </exception> /// <returns>TBD</returns> public override Shape CopyFromPorts(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets) { if (inlets.Any()) throw new ArgumentException("Proposed inlets do not fit ClosedShape", nameof(inlets)); if (outlets.Any()) throw new ArgumentException("Proposed outlets do not fit ClosedShape", nameof(outlets)); return this; } } /// <summary> /// This type of <see cref="Shape"/> can express any number of inputs and outputs at the /// expense of forgetting about their specific types. It is used mainly in the /// implementation of the <see cref="IGraph{TShape,TMaterializer}"/> builders and typically replaced by a more /// meaningful type of Shape when the building is finished. /// </summary> public class AmorphousShape : Shape { /// <summary> /// TBD /// </summary> /// <param name="inlets">TBD</param> /// <param name="outlets">TBD</param> public AmorphousShape(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets) { Inlets = inlets; Outlets = outlets; } /// <summary> /// TBD /// </summary> public override ImmutableArray<Inlet> Inlets { get; } /// <summary> /// TBD /// </summary> public override ImmutableArray<Outlet> Outlets { get; } /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override Shape DeepCopy() => new AmorphousShape(Inlets.Select(i => i.CarbonCopy()).ToImmutableArray(),Outlets.Select(o => o.CarbonCopy()).ToImmutableArray()); /// <summary> /// TBD /// </summary> /// <param name="inlets">TBD</param> /// <param name="outlets">TBD</param> /// <returns>TBD</returns> public override Shape CopyFromPorts(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets) => new AmorphousShape(inlets, outlets); } /// <summary> /// A Source <see cref="Shape"/> has exactly one output and no inputs, it models a source of data. /// </summary> /// <typeparam name="TOut">TBD</typeparam> public sealed class SourceShape<TOut> : Shape { /// <summary> /// TBD /// </summary> /// <param name="outlet">TBD</param> /// <exception cref="ArgumentNullException">TBD</exception> public SourceShape(Outlet<TOut> outlet) { Outlet = outlet ?? 
throw new ArgumentNullException(nameof(outlet)); Outlets = ImmutableArray.Create<Outlet>(outlet); } /// <summary> /// TBD /// </summary> public readonly Outlet<TOut> Outlet; /// <summary> /// TBD /// </summary> public override ImmutableArray<Inlet> Inlets => ImmutableArray<Inlet>.Empty; /// <summary> /// TBD /// </summary> public override ImmutableArray<Outlet> Outlets { get; } /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override Shape DeepCopy() => new SourceShape<TOut>((Outlet<TOut>) Outlet.CarbonCopy()); /// <summary> /// TBD /// </summary> /// <param name="inlets">TBD</param> /// <param name="outlets">TBD</param> /// <exception cref="ArgumentException"> /// This exception is thrown when the size of the specified <paramref name="inlets"/> array is zero /// or the size of the specified <paramref name="outlets"/> array is one. /// </exception> /// <returns>TBD</returns> public override Shape CopyFromPorts(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets) { if (inlets.Length != 0) throw new ArgumentException("Proposed inlets do not fit SourceShape", nameof(inlets)); if (outlets.Length != 1) throw new ArgumentException("Proposed outlets do not fit SourceShape", nameof(outlets)); return new SourceShape<TOut>(outlets[0] as Outlet<TOut>); } /// <inheritdoc/> public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; return obj is SourceShape<TOut> shape && Equals(shape); } /// <inheritdoc/> private bool Equals(SourceShape<TOut> other) => Outlet.Equals(other.Outlet); /// <inheritdoc/> public override int GetHashCode() => Outlet.GetHashCode(); } /// <summary> /// TBD /// </summary> public interface IFlowShape { /// <summary> /// TBD /// </summary> Inlet Inlet { get; } /// <summary> /// TBD /// </summary> Outlet Outlet { get; } } /// <summary> /// A Flow <see cref="Shape"/> has exactly one input and one output, it looks from the /// outside like a pipe (but it can be a complex topology of streams within of course). /// </summary> /// <typeparam name="TIn">TBD</typeparam> /// <typeparam name="TOut">TBD</typeparam> public sealed class FlowShape<TIn, TOut> : Shape, IFlowShape { /// <summary> /// TBD /// </summary> /// <param name="inlet">TBD</param> /// <param name="outlet">TBD</param> /// <exception cref="ArgumentNullException"> /// This exception is thrown when either the specified <paramref name="inlet"/> or <paramref name="outlet"/> is undefined. /// </exception> public FlowShape(Inlet<TIn> inlet, Outlet<TOut> outlet) { Inlet = inlet ?? throw new ArgumentNullException(nameof(inlet), "FlowShape expected non-null inlet"); Outlet = outlet ?? 
throw new ArgumentNullException(nameof(outlet), "FlowShape expected non-null outlet"); Inlets = ImmutableArray.Create<Inlet>(inlet); Outlets = ImmutableArray.Create<Outlet>(outlet); } Inlet IFlowShape.Inlet => Inlet; Outlet IFlowShape.Outlet => Outlet; /// <summary> /// TBD /// </summary> public Inlet<TIn> Inlet { get; } /// <summary> /// TBD /// </summary> public Outlet<TOut> Outlet { get; } /// <summary> /// TBD /// </summary> public override ImmutableArray<Inlet> Inlets { get; } /// <summary> /// TBD /// </summary> public override ImmutableArray<Outlet> Outlets { get; } /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override Shape DeepCopy() => new FlowShape<TIn, TOut>((Inlet<TIn>) Inlet.CarbonCopy(), (Outlet<TOut>) Outlet.CarbonCopy()); /// <summary> /// TBD /// </summary> /// <param name="inlets">TBD</param> /// <param name="outlets">TBD</param> /// <exception cref="ArgumentException"> /// This exception is thrown when the size of the specified <paramref name="inlets"/> array is one /// or the size of the specified <paramref name="outlets"/> array is one. /// </exception> /// <returns>TBD</returns> public override Shape CopyFromPorts(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets) { if (inlets.Length != 1) throw new ArgumentException("Proposed inlets do not fit FlowShape", nameof(inlets)); if (outlets.Length != 1) throw new ArgumentException("Proposed outlets do not fit FlowShape", nameof(outlets)); return new FlowShape<TIn, TOut>(inlets[0] as Inlet<TIn>, outlets[0] as Outlet<TOut>); } } /// <summary> /// A Sink <see cref="Shape"/> has exactly one input and no outputs, it models a data sink. /// </summary> /// <typeparam name="TIn">TBD</typeparam> public sealed class SinkShape<TIn> : Shape { /// <summary> /// TBD /// </summary> public readonly Inlet<TIn> Inlet; /// <summary> /// TBD /// </summary> /// <param name="inlet">TBD</param> /// <exception cref="ArgumentNullException"> /// This exception is thrown when the specified <paramref name="inlet"/> is undefined. /// </exception> public SinkShape(Inlet<TIn> inlet) { Inlet = inlet ?? throw new ArgumentNullException(nameof(inlet), "SinkShape expected non-null inlet"); Inlets = ImmutableArray.Create<Inlet>(inlet); } /// <summary> /// TBD /// </summary> public override ImmutableArray<Inlet> Inlets { get; } /// <summary> /// TBD /// </summary> public override ImmutableArray<Outlet> Outlets => ImmutableArray<Outlet>.Empty; /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override Shape DeepCopy() => new SinkShape<TIn>((Inlet<TIn>) Inlet.CarbonCopy()); /// <summary> /// TBD /// </summary> /// <param name="inlets">TBD</param> /// <param name="outlets">TBD</param> /// <exception cref="ArgumentException"> /// This exception is thrown when the size of the specified <paramref name="inlets"/> array is zero /// or the size of the specified <paramref name="outlets"/> array is one. 
/// </exception> /// <returns>TBD</returns> public override Shape CopyFromPorts(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets) { if (outlets.Length != 0) throw new ArgumentException("Proposed outlets do not fit SinkShape", nameof(outlets)); if (inlets.Length != 1) throw new ArgumentException("Proposed inlets do not fit SinkShape", nameof(inlets)); return new SinkShape<TIn>(inlets[0] as Inlet<TIn>); } /// <inheritdoc/> public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; return obj is SinkShape<TIn> shape && Equals(shape); } private bool Equals(SinkShape<TIn> other) => Equals(Inlet, other.Inlet); /// <inheritdoc/> public override int GetHashCode() => Inlet.GetHashCode(); } /// <summary> /// A bidirectional flow of elements that consequently has two inputs and two outputs. /// </summary> /// <typeparam name="TIn1">TBD</typeparam> /// <typeparam name="TOut1">TBD</typeparam> /// <typeparam name="TIn2">TBD</typeparam> /// <typeparam name="TOut2">TBD</typeparam> public sealed class BidiShape<TIn1, TOut1, TIn2, TOut2> : Shape { /// <summary> /// TBD /// </summary> public readonly Inlet<TIn1> Inlet1; /// <summary> /// TBD /// </summary> public readonly Inlet<TIn2> Inlet2; /// <summary> /// TBD /// </summary> public readonly Outlet<TOut1> Outlet1; /// <summary> /// TBD /// </summary> public readonly Outlet<TOut2> Outlet2; /// <summary> /// TBD /// </summary> /// <param name="in1">TBD</param> /// <param name="out1">TBD</param> /// <param name="in2">TBD</param> /// <param name="out2">TBD</param> /// <exception cref="ArgumentNullException"> /// This exception is thrown when either the specified <paramref name="in1"/>, <paramref name="out1"/>, /// <paramref name="in2"/>, or <paramref name="out2"/> is undefined. /// </exception> public BidiShape(Inlet<TIn1> in1, Outlet<TOut1> out1, Inlet<TIn2> in2, Outlet<TOut2> out2) { Inlet1 = in1 ?? throw new ArgumentNullException(nameof(in1)); Inlet2 = in2 ?? throw new ArgumentNullException(nameof(in2)); Outlet1 = out1 ?? throw new ArgumentNullException(nameof(out1)); Outlet2 = out2 ?? throw new ArgumentNullException(nameof(out2)); Inlets = ImmutableArray.Create<Inlet>(Inlet1, Inlet2); Outlets = ImmutableArray.Create<Outlet>(Outlet1, Outlet2); } /// <summary> /// TBD /// </summary> /// <param name="top">TBD</param> /// <param name="bottom">TBD</param> public BidiShape(FlowShape<TIn1, TOut1> top, FlowShape<TIn2, TOut2> bottom) : this(top.Inlet, top.Outlet, bottom.Inlet, bottom.Outlet) { } /// <summary> /// TBD /// </summary> public override ImmutableArray<Inlet> Inlets { get; } /// <summary> /// TBD /// </summary> public override ImmutableArray<Outlet> Outlets { get; } /// <summary> /// TBD /// </summary> /// <returns>TBD</returns> public override Shape DeepCopy() { return new BidiShape<TIn1, TOut1, TIn2, TOut2>( (Inlet<TIn1>) Inlet1.CarbonCopy(), (Outlet<TOut1>) Outlet1.CarbonCopy(), (Inlet<TIn2>) Inlet2.CarbonCopy(), (Outlet<TOut2>) Outlet2.CarbonCopy()); } /// <summary> /// TBD /// </summary> /// <param name="inlets">TBD</param> /// <param name="outlets">TBD</param> /// <exception cref="ArgumentException"> /// This exception is thrown when the size of the specified <paramref name="inlets"/> array is two /// or the size of the specified <paramref name="outlets"/> array is two. 
        /// </exception>
        /// <returns>TBD</returns>
        public override Shape CopyFromPorts(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets)
        {
            if (inlets.Length != 2)
                throw new ArgumentException($"Proposed inlets [{string.Join(", ", inlets)}] don't fit BidiShape");
            if (outlets.Length != 2)
                throw new ArgumentException($"Proposed outlets [{string.Join(", ", outlets)}] don't fit BidiShape");

            return new BidiShape<TIn1, TOut1, TIn2, TOut2>((Inlet<TIn1>)inlets[0], (Outlet<TOut1>)outlets[0],
                (Inlet<TIn2>)inlets[1], (Outlet<TOut2>)outlets[1]);
        }

        /// <summary>
        /// TBD
        /// </summary>
        /// <returns>TBD</returns>
        public Shape Reversed() => new BidiShape<TIn2, TOut2, TIn1, TOut1>(Inlet2, Outlet2, Inlet1, Outlet1);
    }

    /// <summary>
    /// TBD
    /// </summary>
    public static class BidiShape
    {
        /// <summary>
        /// TBD
        /// </summary>
        /// <typeparam name="TIn1">TBD</typeparam>
        /// <typeparam name="TOut1">TBD</typeparam>
        /// <typeparam name="TIn2">TBD</typeparam>
        /// <typeparam name="TOut2">TBD</typeparam>
        /// <param name="top">TBD</param>
        /// <param name="bottom">TBD</param>
        /// <returns>TBD</returns>
        public static BidiShape<TIn1, TOut1, TIn2, TOut2> FromFlows<TIn1, TOut1, TIn2, TOut2>(
            FlowShape<TIn1, TOut1> top, FlowShape<TIn2, TOut2> bottom)
            => new BidiShape<TIn1, TOut1, TIn2, TOut2>(top.Inlet, top.Outlet, bottom.Inlet, bottom.Outlet);
    }
}
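// Illustrative sketch only (not part of Akka.Streams): a hypothetical fan-in shape with
// two typed inlets and one outlet, showing what any custom Shape subclass must supply --
// the immutable port lists plus DeepCopy and CopyFromPorts, mirroring the built-in
// shapes above. The PairFanInShape name is invented for this example.
using System;
using System.Collections.Immutable;

namespace Akka.Streams
{
    public sealed class PairFanInShape<TIn0, TIn1, TOut> : Shape
    {
        public readonly Inlet<TIn0> In0;
        public readonly Inlet<TIn1> In1;
        public readonly Outlet<TOut> Out;

        public PairFanInShape(Inlet<TIn0> in0, Inlet<TIn1> in1, Outlet<TOut> @out)
        {
            In0 = in0 ?? throw new ArgumentNullException(nameof(in0));
            In1 = in1 ?? throw new ArgumentNullException(nameof(in1));
            Out = @out ?? throw new ArgumentNullException(nameof(@out));
            Inlets = ImmutableArray.Create<Inlet>(in0, in1);
            Outlets = ImmutableArray.Create<Outlet>(@out);
        }

        public override ImmutableArray<Inlet> Inlets { get; }
        public override ImmutableArray<Outlet> Outlets { get; }

        // Copies must preserve the concrete shape type, per the DeepCopy contract above.
        public override Shape DeepCopy() =>
            new PairFanInShape<TIn0, TIn1, TOut>(
                (Inlet<TIn0>)In0.CarbonCopy(),
                (Inlet<TIn1>)In1.CarbonCopy(),
                (Outlet<TOut>)Out.CarbonCopy());

        public override Shape CopyFromPorts(ImmutableArray<Inlet> inlets, ImmutableArray<Outlet> outlets)
        {
            if (inlets.Length != 2)
                throw new ArgumentException("Proposed inlets do not fit PairFanInShape", nameof(inlets));
            if (outlets.Length != 1)
                throw new ArgumentException("Proposed outlets do not fit PairFanInShape", nameof(outlets));

            return new PairFanInShape<TIn0, TIn1, TOut>(
                (Inlet<TIn0>)inlets[0], (Inlet<TIn1>)inlets[1], (Outlet<TOut>)outlets[0]);
        }
    }
}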
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the swf-2012-01-25.normal.json service model. */ using System; using System.Collections.Generic; using System.Xml.Serialization; using System.Text; using System.IO; using Amazon.Runtime; using Amazon.Runtime.Internal; namespace Amazon.SimpleWorkflow.Model { /// <summary> /// Provides details of <code>WorkflowExecutionStarted</code> event. /// </summary> public partial class WorkflowExecutionStartedEventAttributes { private ChildPolicy _childPolicy; private string _continuedExecutionRunId; private string _executionStartToCloseTimeout; private string _input; private string _lambdaRole; private long? _parentInitiatedEventId; private WorkflowExecution _parentWorkflowExecution; private List<string> _tagList = new List<string>(); private TaskList _taskList; private string _taskPriority; private string _taskStartToCloseTimeout; private WorkflowType _workflowType; /// <summary> /// Gets and sets the property ChildPolicy. /// <para> /// The policy to use for the child workflow executions if this workflow execution is /// terminated, by calling the <a>TerminateWorkflowExecution</a> action explicitly or /// due to an expired timeout. /// </para> /// /// <para> /// The supported child policies are: /// </para> /// <ul> <li><b>TERMINATE:</b> the child executions will be terminated.</li> <li><b>REQUEST_CANCEL:</b> /// a request to cancel will be attempted for each child execution by recording a <code>WorkflowExecutionCancelRequested</code> /// event in its history. It is up to the decider to take appropriate actions when it /// receives an execution history with this event.</li> <li><b>ABANDON:</b> no action /// will be taken. The child executions will continue to run.</li> </ul> /// </summary> public ChildPolicy ChildPolicy { get { return this._childPolicy; } set { this._childPolicy = value; } } // Check to see if ChildPolicy property is set internal bool IsSetChildPolicy() { return this._childPolicy != null; } /// <summary> /// Gets and sets the property ContinuedExecutionRunId. /// <para> /// If this workflow execution was started due to a <code>ContinueAsNewWorkflowExecution</code> /// decision, then it contains the <code>runId</code> of the previous workflow execution /// that was closed and continued as this execution. /// </para> /// </summary> public string ContinuedExecutionRunId { get { return this._continuedExecutionRunId; } set { this._continuedExecutionRunId = value; } } // Check to see if ContinuedExecutionRunId property is set internal bool IsSetContinuedExecutionRunId() { return this._continuedExecutionRunId != null; } /// <summary> /// Gets and sets the property ExecutionStartToCloseTimeout. /// <para> /// The maximum duration for this workflow execution. /// </para> /// /// <para> /// The duration is specified in seconds; an integer greater than or equal to 0. The value /// "NONE" can be used to specify unlimited duration. 
/// </para> /// </summary> public string ExecutionStartToCloseTimeout { get { return this._executionStartToCloseTimeout; } set { this._executionStartToCloseTimeout = value; } } // Check to see if ExecutionStartToCloseTimeout property is set internal bool IsSetExecutionStartToCloseTimeout() { return this._executionStartToCloseTimeout != null; } /// <summary> /// Gets and sets the property Input. /// <para> /// The input provided to the workflow execution (if any). /// </para> /// </summary> public string Input { get { return this._input; } set { this._input = value; } } // Check to see if Input property is set internal bool IsSetInput() { return this._input != null; } /// <summary> /// Gets and sets the property LambdaRole. /// <para> /// The IAM role attached to this workflow execution to use when invoking AWS Lambda functions. /// </para> /// </summary> public string LambdaRole { get { return this._lambdaRole; } set { this._lambdaRole = value; } } // Check to see if LambdaRole property is set internal bool IsSetLambdaRole() { return this._lambdaRole != null; } /// <summary> /// Gets and sets the property ParentInitiatedEventId. /// <para> /// The ID of the <code>StartChildWorkflowExecutionInitiated</code> event corresponding /// to the <code>StartChildWorkflowExecution</code> decision to start this workflow execution. /// The source event with this ID can be found in the history of the source workflow execution. /// This information can be useful for diagnosing problems by tracing back the chain of /// events leading up to this event. /// </para> /// </summary> public long ParentInitiatedEventId { get { return this._parentInitiatedEventId.GetValueOrDefault(); } set { this._parentInitiatedEventId = value; } } // Check to see if ParentInitiatedEventId property is set internal bool IsSetParentInitiatedEventId() { return this._parentInitiatedEventId.HasValue; } /// <summary> /// Gets and sets the property ParentWorkflowExecution. /// <para> /// The source workflow execution that started this workflow execution. The member is /// not set if the workflow execution was not started by a workflow. /// </para> /// </summary> public WorkflowExecution ParentWorkflowExecution { get { return this._parentWorkflowExecution; } set { this._parentWorkflowExecution = value; } } // Check to see if ParentWorkflowExecution property is set internal bool IsSetParentWorkflowExecution() { return this._parentWorkflowExecution != null; } /// <summary> /// Gets and sets the property TagList. /// <para> /// The list of tags associated with this workflow execution. An execution can have up /// to 5 tags. /// </para> /// </summary> public List<string> TagList { get { return this._tagList; } set { this._tagList = value; } } // Check to see if TagList property is set internal bool IsSetTagList() { return this._tagList != null && this._tagList.Count > 0; } /// <summary> /// Gets and sets the property TaskList. /// <para> /// The name of the task list for scheduling the decision tasks for this workflow execution. /// </para> /// </summary> public TaskList TaskList { get { return this._taskList; } set { this._taskList = value; } } // Check to see if TaskList property is set internal bool IsSetTaskList() { return this._taskList != null; } /// <summary> /// Gets and sets the property TaskPriority. 
        /// </summary>
        public string TaskPriority
        {
            get { return this._taskPriority; }
            set { this._taskPriority = value; }
        }

        // Check to see if TaskPriority property is set
        internal bool IsSetTaskPriority()
        {
            return this._taskPriority != null;
        }

        /// <summary>
        /// Gets and sets the property TaskStartToCloseTimeout.
        /// <para>
        /// The maximum duration of decision tasks for this workflow type.
        /// </para>
        ///
        /// <para>
        /// The duration is specified in seconds; an integer greater than or equal to 0. The value
        /// "NONE" can be used to specify unlimited duration.
        /// </para>
        /// </summary>
        public string TaskStartToCloseTimeout
        {
            get { return this._taskStartToCloseTimeout; }
            set { this._taskStartToCloseTimeout = value; }
        }

        // Check to see if TaskStartToCloseTimeout property is set
        internal bool IsSetTaskStartToCloseTimeout()
        {
            return this._taskStartToCloseTimeout != null;
        }

        /// <summary>
        /// Gets and sets the property WorkflowType.
        /// <para>
        /// The workflow type of this execution.
        /// </para>
        /// </summary>
        public WorkflowType WorkflowType
        {
            get { return this._workflowType; }
            set { this._workflowType = value; }
        }

        // Check to see if WorkflowType property is set
        internal bool IsSetWorkflowType()
        {
            return this._workflowType != null;
        }
    }
}
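// Illustrative sketch only (not part of the AWS SDK sources above): shows the string-based
// duration convention the doc comments describe -- seconds expressed as a string, or "NONE"
// for an unlimited duration. All values here are arbitrary examples.
using Amazon.SimpleWorkflow.Model;

internal static class StartedEventAttributesExample
{
    public static WorkflowExecutionStartedEventAttributes Build()
    {
        return new WorkflowExecutionStartedEventAttributes
        {
            Input = "{\"orderId\": 42}",
            ExecutionStartToCloseTimeout = "3600", // one hour, in seconds
            TaskStartToCloseTimeout = "NONE",      // unlimited decision-task duration
            TagList = { "example", "sketch" }      // up to 5 tags per execution
        };
    }
}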
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ // // // // Central spin logic used across the entire code-base. // // =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- using System; using System.Runtime.ConstrainedExecution; using System.Threading; using System.Diagnostics; using System.Diagnostics.Contracts; using System.Diagnostics.CodeAnalysis; namespace System.Threading { // SpinWait is just a little value type that encapsulates some common spinning // logic. It ensures we always yield on single-proc machines (instead of using busy // waits), and that we work well on HT. It encapsulates a good mixture of spinning // and real yielding. It's a value type so that various areas of the engine can use // one by allocating it on the stack w/out unnecessary GC allocation overhead, e.g.: // // void f() { // SpinWait wait = new SpinWait(); // while (!p) { wait.SpinOnce(); } // ... // } // // Internally it just maintains a counter that is used to decide when to yield, etc. // // A common usage is to spin before blocking. In those cases, the NextSpinWillYield // property allows a user to decide to fall back to waiting once it returns true: // // void f() { // SpinWait wait = new SpinWait(); // while (!p) { // if (wait.NextSpinWillYield) { /* block! */ } // else { wait.SpinOnce(); } // } // ... // } /// <summary> /// Provides support for spin-based waiting. /// </summary> /// <remarks> /// <para> /// <see cref="SpinWait"/> encapsulates common spinning logic. On single-processor machines, yields are /// always used instead of busy waits, and on computers with Intel processors employing Hyper-Threading /// technology, it helps to prevent hardware thread starvation. SpinWait encapsulates a good mixture of /// spinning and true yielding. /// </para> /// <para> /// <see cref="SpinWait"/> is a value type, which means that low-level code can utilize SpinWait without /// fear of unnecessary allocation overheads. SpinWait is not generally useful for ordinary applications. /// In most cases, you should use the synchronization classes provided by the .NET Framework, such as /// <see cref="System.Threading.Monitor"/>. For most purposes where spin waiting is required, however, /// the <see cref="SpinWait"/> type should be preferred over the <see /// cref="System.Threading.Thread.SpinWait"/> method. /// </para> /// <para> /// While SpinWait is designed to be used in concurrent applications, it is not designed to be /// used from multiple threads concurrently. SpinWait's members are not thread-safe. If multiple /// threads must spin, each should use its own instance of SpinWait. /// </para> /// </remarks> public struct SpinWait { // These constants determine the frequency of yields versus spinning. The // numbers may seem fairly arbitrary, but were derived with at least some // thought in the design document. I fully expect they will need to change // over time as we gain more experience with performance. internal const int YIELD_THRESHOLD = 10; // When to switch over to a true yield. internal const int SLEEP_0_EVERY_HOW_MANY_TIMES = 5; // After how many yields should we Sleep(0)? internal const int SLEEP_1_EVERY_HOW_MANY_TIMES = 20; // After how many yields should we Sleep(1)? // The number of times we've spun already. 
private int m_count; /// <summary> /// Gets the number of times <see cref="SpinOnce"/> has been called on this instance. /// </summary> public int Count { get { return m_count; } } /// <summary> /// Gets whether the next call to <see cref="SpinOnce"/> will yield the processor, triggering a /// forced context switch. /// </summary> /// <value>Whether the next call to <see cref="SpinOnce"/> will yield the processor, triggering a /// forced context switch.</value> /// <remarks> /// On a single-CPU machine, <see cref="SpinOnce"/> always yields the processor. On machines with /// multiple CPUs, <see cref="SpinOnce"/> may yield after an unspecified number of calls. /// </remarks> public bool NextSpinWillYield { get { return m_count > YIELD_THRESHOLD || PlatformHelper.IsSingleProcessor; } } /// <summary> /// Performs a single spin. /// </summary> /// <remarks> /// This is typically called in a loop, and may change in behavior based on the number of times a /// <see cref="SpinOnce"/> has been called thus far on this instance. /// </remarks> public void SpinOnce() { if (NextSpinWillYield) { // // We must yield. // // We prefer to call Thread.Yield first, triggering a SwitchToThread. This // unfortunately doesn't consider all runnable threads on all OS SKUs. In // some cases, it may only consult the runnable threads whose ideal processor // is the one currently executing code. Thus we occasionally issue a call to // Sleep(0), which considers all runnable threads at equal priority. Even this // is insufficient since we may be spin waiting for lower priority threads to // execute; we therefore must call Sleep(1) once in a while too, which considers // all runnable threads, regardless of ideal processor and priority, but may // remove the thread from the scheduler's queue for 10+ms, if the system is // configured to use the (default) coarse-grained system timer. // int yieldsSoFar = (m_count >= YIELD_THRESHOLD ? m_count - YIELD_THRESHOLD : m_count); if ((yieldsSoFar % SLEEP_1_EVERY_HOW_MANY_TIMES) == (SLEEP_1_EVERY_HOW_MANY_TIMES - 1)) { Thread.Sleep(1); } else if ((yieldsSoFar % SLEEP_0_EVERY_HOW_MANY_TIMES) == (SLEEP_0_EVERY_HOW_MANY_TIMES - 1)) { Thread.Sleep(0); } else { Thread.Yield(); } } else { // // Otherwise, we will spin. // // We do this using the CLR's SpinWait API, which is just a busy loop that // issues YIELD/PAUSE instructions to ensure multi-threaded CPUs can react // intelligently to avoid starving. (These are NOOPs on other CPUs.) We // choose a number for the loop iteration count such that each successive // call spins for longer, to reduce cache contention. We cap the total // number of spins we are willing to tolerate to reduce delay to the caller, // since we expect most callers will eventually block anyway. // Thread.SpinWait(4 << m_count); } // Finally, increment our spin counter. m_count = (m_count == int.MaxValue ? YIELD_THRESHOLD : m_count + 1); } /// <summary> /// Resets the spin counter. /// </summary> /// <remarks> /// This makes <see cref="SpinOnce"/> and <see cref="NextSpinWillYield"/> behave as though no calls /// to <see cref="SpinOnce"/> had been issued on this instance. If a <see cref="SpinWait"/> instance /// is reused many times, it may be useful to reset it to avoid yielding too soon. /// </remarks> public void Reset() { m_count = 0; } #region Static Methods /// <summary> /// Spins until the specified condition is satisfied. 
/// </summary> /// <param name="condition">A delegate to be executed over and over until it returns true.</param> /// <exception cref="ArgumentNullException">The <paramref name="condition"/> argument is null.</exception> public static void SpinUntil(Func<bool> condition) { #if DEBUG bool result = #endif SpinUntil(condition, Timeout.Infinite); #if DEBUG Debug.Assert(result); #endif } /// <summary> /// Spins until the specified condition is satisfied or until the specified timeout is expired. /// </summary> /// <param name="condition">A delegate to be executed over and over until it returns true.</param> /// <param name="timeout"> /// A <see cref="TimeSpan"/> that represents the number of milliseconds to wait, /// or a TimeSpan that represents -1 milliseconds to wait indefinitely.</param> /// <returns>True if the condition is satisfied within the timeout; otherwise, false</returns> /// <exception cref="ArgumentNullException">The <paramref name="condition"/> argument is null.</exception> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="timeout"/> is a negative number /// other than -1 milliseconds, which represents an infinite time-out -or- timeout is greater than /// <see cref="System.Int32.MaxValue"/>.</exception> public static bool SpinUntil(Func<bool> condition, TimeSpan timeout) { // Validate the timeout Int64 totalMilliseconds = (Int64)timeout.TotalMilliseconds; if (totalMilliseconds < -1 || totalMilliseconds > Int32.MaxValue) { throw new System.ArgumentOutOfRangeException( nameof(timeout), timeout, SR.SpinWait_SpinUntil_TimeoutWrong); } // Call wait with the timeout milliseconds return SpinUntil(condition, (int)timeout.TotalMilliseconds); } /// <summary> /// Spins until the specified condition is satisfied or until the specified timeout is expired. /// </summary> /// <param name="condition">A delegate to be executed over and over until it returns true.</param> /// <param name="millisecondsTimeout">The number of milliseconds to wait, or <see /// cref="System.Threading.Timeout.Infinite"/> (-1) to wait indefinitely.</param> /// <returns>True if the condition is satisfied within the timeout; otherwise, false</returns> /// <exception cref="ArgumentNullException">The <paramref name="condition"/> argument is null.</exception> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="millisecondsTimeout"/> is a /// negative number other than -1, which represents an infinite time-out.</exception> public static bool SpinUntil(Func<bool> condition, int millisecondsTimeout) { if (millisecondsTimeout < Timeout.Infinite) { throw new ArgumentOutOfRangeException( nameof(millisecondsTimeout), millisecondsTimeout, SR.SpinWait_SpinUntil_TimeoutWrong); } if (condition == null) { throw new ArgumentNullException(nameof(condition), SR.SpinWait_SpinUntil_ArgumentNull); } uint startTime = 0; if (millisecondsTimeout != 0 && millisecondsTimeout != Timeout.Infinite) { startTime = TimeoutHelper.GetTime(); } SpinWait spinner = new SpinWait(); while (!condition()) { if (millisecondsTimeout == 0) { return false; } spinner.SpinOnce(); if (millisecondsTimeout != Timeout.Infinite && spinner.NextSpinWillYield) { if (millisecondsTimeout <= (TimeoutHelper.GetTime() - startTime)) { return false; } } } return true; } #endregion } /// <summary> /// A helper class to get the number of processors, it updates the numbers of processors every sampling interval. 
    /// </summary>
    internal static class PlatformHelper
    {
        private const int PROCESSOR_COUNT_REFRESH_INTERVAL_MS = 30000; // How often to refresh the count, in milliseconds.

        private static volatile int s_processorCount; // The last count seen.
        private static volatile int s_lastProcessorCountRefreshTicks; // The last time we refreshed.

        /// <summary>
        /// Gets the number of available processors
        /// </summary>
        [SuppressMessage("Microsoft.Concurrency", "CA8001", Justification = "Reviewed for thread safety")]
        internal static int ProcessorCount
        {
            get
            {
                int now = Environment.TickCount;
                int procCount = s_processorCount;
                if (procCount == 0 || (now - s_lastProcessorCountRefreshTicks) >= PROCESSOR_COUNT_REFRESH_INTERVAL_MS)
                {
                    s_processorCount = procCount = Environment.ProcessorCount;
                    s_lastProcessorCountRefreshTicks = now;
                }

                Debug.Assert(procCount > 0, "Processor count should be greater than 0.");
                return procCount;
            }
        }

        /// <summary>
        /// Gets whether the current machine has only a single processor.
        /// </summary>
        internal static bool IsSingleProcessor
        {
            get { return ProcessorCount == 1; }
        }
    }
}
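// Illustrative sketch only (not part of the BCL sources above): a tiny one-shot latch that
// demonstrates the documented SpinWait usage pattern -- loop on a condition and let
// SpinOnce decide when to stop busy-spinning and start yielding or sleeping. The SpinLatch
// name is hypothetical.
using System.Threading;

internal sealed class SpinLatch
{
    private volatile bool _signaled;

    public void Signal() => _signaled = true;

    // Callers spin until Signal() is observed; SpinWait escalates from busy-waiting
    // to Thread.Yield/Sleep as the spin count grows, as described in the remarks above.
    public void Wait()
    {
        var spinner = new SpinWait();
        while (!_signaled)
        {
            spinner.SpinOnce();
        }
    }
}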
using Content.Server.Construction.Components;
using Content.Shared.Construction;
using Content.Shared.Construction.Prototypes;
using Content.Shared.Construction.Steps;
using Robust.Server.Containers;
using Robust.Shared.Containers;
using Robust.Shared.GameObjects;
using Robust.Shared.IoC;
using Robust.Shared.Prototypes;

namespace Content.Server.Construction
{
    public sealed partial class ConstructionSystem
    {
        [Dependency] private readonly ContainerSystem _containerSystem = default!;

        private void InitializeGraphs()
        {
        }

        public bool AddContainer(EntityUid uid, string container, ConstructionComponent? construction = null)
        {
            if (!Resolve(uid, ref construction))
                return false;

            return construction.Containers.Add(container);
        }

        public ConstructionGraphPrototype? GetCurrentGraph(EntityUid uid, ConstructionComponent? construction = null)
        {
            if (!Resolve(uid, ref construction, false))
                return null;

            return _prototypeManager.TryIndex(construction.Graph, out ConstructionGraphPrototype? graph) ? graph : null;
        }

        public ConstructionGraphNode? GetCurrentNode(EntityUid uid, ConstructionComponent? construction = null)
        {
            if (!Resolve(uid, ref construction, false))
                return null;

            if (construction.Node is not {} nodeIdentifier)
                return null;

            return GetCurrentGraph(uid, construction) is not {} graph ? null : GetNodeFromGraph(graph, nodeIdentifier);
        }

        public ConstructionGraphEdge? GetCurrentEdge(EntityUid uid, ConstructionComponent? construction = null)
        {
            if (!Resolve(uid, ref construction, false))
                return null;

            if (construction.EdgeIndex is not {} edgeIndex)
                return null;

            return GetCurrentNode(uid, construction) is not {} node ? null : GetEdgeFromNode(node, edgeIndex);
        }

        public ConstructionGraphStep? GetCurrentStep(EntityUid uid, ConstructionComponent? construction = null)
        {
            if (!Resolve(uid, ref construction, false))
                return null;

            if (GetCurrentEdge(uid, construction) is not {} edge)
                return null;

            return GetStepFromEdge(edge, construction.StepIndex);
        }

        public ConstructionGraphNode? GetTargetNode(EntityUid uid, ConstructionComponent? construction)
        {
            if (!Resolve(uid, ref construction))
                return null;

            if (construction.TargetNode is not {} targetNodeId)
                return null;

            if (GetCurrentGraph(uid, construction) is not {} graph)
                return null;

            return GetNodeFromGraph(graph, targetNodeId);
        }

        public ConstructionGraphEdge? GetTargetEdge(EntityUid uid, ConstructionComponent? construction)
        {
            if (!Resolve(uid, ref construction))
                return null;

            if (construction.TargetEdgeIndex is not {} targetEdgeIndex)
                return null;

            if (GetCurrentNode(uid, construction) is not {} node)
                return null;

            return GetEdgeFromNode(node, targetEdgeIndex);
        }

        public (ConstructionGraphEdge? edge, ConstructionGraphStep? step) GetCurrentEdgeAndStep(EntityUid uid, ConstructionComponent? construction = null)
        {
            if (!Resolve(uid, ref construction, false))
                return default;

            var edge = GetCurrentEdge(uid, construction);
            if (edge == null)
                return default;

            var step = GetStepFromEdge(edge, construction.StepIndex);
            return (edge, step);
        }

        public ConstructionGraphNode? GetNodeFromGraph(ConstructionGraphPrototype graph, string id)
        {
            return graph.Nodes.TryGetValue(id, out var node) ? node : null;
        }

        public ConstructionGraphEdge? GetEdgeFromNode(ConstructionGraphNode node, int index)
        {
            return node.Edges.Count > index ? node.Edges[index] : null;
        }

        public ConstructionGraphStep? GetStepFromEdge(ConstructionGraphEdge edge, int index)
        {
            return edge.Steps.Count > index ? edge.Steps[index] : null;
        }

        public bool ChangeNode(EntityUid uid, EntityUid? userUid, string id, bool performActions = true, ConstructionComponent? construction = null)
        {
            if (!Resolve(uid, ref construction))
                return false;

            if (GetCurrentGraph(uid, construction) is not {} graph
                || GetNodeFromGraph(graph, id) is not {} node)
                return false;

            construction.Node = id;

            if (performActions)
                PerformActions(uid, userUid, node.Actions);

            // An action might have deleted the entity... Account for this.
            if (!Exists(uid))
                return false;

            // ChangeEntity will handle the pathfinding update.
            if (node.Entity is {} newEntity && ChangeEntity(uid, userUid, newEntity, construction) != null)
                return true;

            UpdatePathfinding(uid, construction);
            return true;
        }

        private EntityUid? ChangeEntity(EntityUid uid, EntityUid? userUid, string newEntity,
            ConstructionComponent? construction = null,
            MetaDataComponent? metaData = null,
            TransformComponent? transform = null,
            ContainerManagerComponent? containerManager = null)
        {
            if (!Resolve(uid, ref construction, ref metaData, ref transform))
                return null;

            if (newEntity == metaData.EntityPrototype?.ID || !_prototypeManager.HasIndex<EntityPrototype>(newEntity))
                return null;

            // Optional resolves.
            Resolve(uid, ref containerManager, false);

            // We create the new entity.
            var newUid = EntityManager.SpawnEntity(newEntity, transform.Coordinates);

            // Construction transferring.
            var newConstruction = EntityManager.EnsureComponent<ConstructionComponent>(newUid);

            // We set the graph and node accordingly... Then we append our containers to theirs.
            ChangeGraph(newUid, userUid, construction.Graph, construction.Node, false, newConstruction);

            if (construction.TargetNode is {} targetNode)
                SetPathfindingTarget(newUid, targetNode, newConstruction);

            // Transfer all construction-owned containers.
            newConstruction.Containers.UnionWith(construction.Containers);

            // Transfer all pending interaction events too.
            while (construction.InteractionQueue.TryDequeue(out var ev))
            {
                newConstruction.InteractionQueue.Enqueue(ev);
            }

            // Transform transferring.
            var newTransform = EntityManager.GetComponent<TransformComponent>(newUid);
            newTransform.LocalRotation = transform.LocalRotation;
            newTransform.Anchored = transform.Anchored;

            // Container transferring.
            if (containerManager != null)
            {
                // Ensure the new entity has a container manager. Also for resolve goodness.
                var newContainerManager = EntityManager.EnsureComponent<ContainerManagerComponent>(newUid);

                // Transfer all construction-owned containers from the old entity to the new one.
                foreach (var container in construction.Containers)
                {
                    if (!_containerSystem.TryGetContainer(uid, container, out var ourContainer, containerManager))
                        continue;

                    // NOTE: Only Container is supported by Construction!
                    var otherContainer = _containerSystem.EnsureContainer<Container>(newUid, container, newContainerManager);

                    for (var i = ourContainer.ContainedEntities.Count - 1; i >= 0; i--)
                    {
                        var entity = ourContainer.ContainedEntities[i];
                        ourContainer.ForceRemove(entity);
                        otherContainer.Insert(entity);
                    }
                }
            }

            EntityManager.QueueDeleteEntity(uid);

            if (GetCurrentNode(newUid, newConstruction) is {} node)
                PerformActions(newUid, userUid, node.Actions);

            return newUid;
        }

        public bool ChangeGraph(EntityUid uid, EntityUid? userUid, string graphId, string nodeId, bool performActions = true, ConstructionComponent? construction = null)
        {
            if (!Resolve(uid, ref construction))
                return false;

            if (!_prototypeManager.TryIndex<ConstructionGraphPrototype>(graphId, out var graph))
                return false;

            if (GetNodeFromGraph(graph, nodeId) is not {} node)
                return false;

            construction.Graph = graphId;
            return ChangeNode(uid, userUid, nodeId, performActions, construction);
        }
    }
}
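// ---------------------------------------------------------------------------------
// Usage sketch (not part of the original file): a minimal server-side system showing
// how the graph helpers above might be called. This is an illustrative assumption,
// not shipped content - the system name, the "MachineFrame" graph id and the
// "missingWires" node id are hypothetical placeholders for whatever prototypes
// exist in your content pack.
// ---------------------------------------------------------------------------------
using Content.Server.Construction.Components;
using Robust.Shared.GameObjects;
using Robust.Shared.IoC;

namespace Content.Server.Construction.Examples
{
    public sealed class ConstructionGraphExampleSystem : EntitySystem
    {
        // System-to-system injection, mirroring how ConstructionSystem injects ContainerSystem.
        [Dependency] private readonly ConstructionSystem _construction = default!;

        /// <summary>
        ///     Moves an entity to a named node of a named graph, bailing out when the entity
        ///     has no ConstructionComponent or is not currently on any graph node.
        /// </summary>
        public bool TryAdvance(EntityUid uid, EntityUid? user)
        {
            if (!EntityManager.TryGetComponent(uid, out ConstructionComponent? construction))
                return false;

            // Inspect where the entity currently is in its construction graph.
            var node = _construction.GetCurrentNode(uid, construction);
            if (node == null)
                return false;

            // Jump to a (hypothetical) node on a (hypothetical) graph; ChangeGraph validates
            // both identifiers against the prototype manager before mutating anything.
            return _construction.ChangeGraph(uid, user, "MachineFrame", "missingWires", performActions: true, construction);
        }
    }
}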
//////////////////////////////////////////////////////////////////////////////////////
// Author               : Shukri Adams                                              //
// Contact              : [email protected]                                         //
// Compiler requirement : .Net 4.0                                                  //
//                                                                                  //
// vcFramework : A reuseable library of utility classes                             //
// Copyright (C)                                                                    //
//                                                                                  //
// This program is free software; you can redistribute it and/or modify it under   //
// the terms of the GNU General Public License as published by the Free Software   //
// Foundation; either version 2 of the License, or (at your option) any later      //
// version.                                                                         //
//                                                                                  //
// This program is distributed in the hope that it will be useful, but WITHOUT ANY //
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A //
// PARTICULAR PURPOSE. See the GNU General Public License for more details.        //
//                                                                                  //
// You should have received a copy of the GNU General Public License along with    //
// this program; if not, write to the Free Software Foundation, Inc., 59 Temple    //
// Place, Suite 330, Boston, MA 02111-1307 USA                                      //
//////////////////////////////////////////////////////////////////////////////////////

using System;
using System.Drawing;
using System.Windows.Forms;
using System.Xml;

namespace vcFramework.Drawing
{
    /// <summary> Drawing helper methods for hit-testing, text measurement and painting outlines. </summary>
    public class DrawingLib
    {
        /// <summary> Possible positions a point can have outside a rectangle, relative to that rectangle </summary>
        public enum RectanglePositions : int
        {
            Above,
            AboveRight,
            Right,
            BelowRight,
            Below,
            BelowLeft,
            Left,
            AboveLeft
        }

        /// <summary> Determines if a point lies within the bounds of a rectangle. </summary>
        /// <param name="pntMousePosition"></param>
        /// <param name="rectRectangle"></param>
        /// <returns></returns>
        static public bool PointInRectangle(Point pntMousePosition, Rectangle rectRectangle)
        {
            if (pntMousePosition.X < rectRectangle.Location.X
                || pntMousePosition.Y < rectRectangle.Location.Y
                || pntMousePosition.X > rectRectangle.Location.X + rectRectangle.Size.Width
                || pntMousePosition.Y > rectRectangle.Location.Y + rectRectangle.Size.Height)
                return false;

            return true;
        }

        /// <summary> Determines where a point lies outside a rectangle, relative to that rectangle </summary>
        /// <param name="pntMousePosition"></param>
        /// <param name="rectRectangle"></param>
        /// <returns></returns>
        static public RectanglePositions PointPositionOutsideRectangle(Point pntMousePosition, Rectangle rectRectangle)
        {
            if (pntMousePosition.Y < rectRectangle.Y)
            {
                // mouse lies above rectangle
                if (pntMousePosition.X < rectRectangle.Location.X)
                    return RectanglePositions.AboveLeft;
                else if (pntMousePosition.X > rectRectangle.Location.X + rectRectangle.Size.Width)
                    return RectanglePositions.AboveRight;

                return RectanglePositions.Above;
            }
            else if (pntMousePosition.Y > rectRectangle.Location.Y + rectRectangle.Size.Height)
            {
                // mouse lies below rectangle
                if (pntMousePosition.X < rectRectangle.Location.X)
                    return RectanglePositions.BelowLeft;
                else if (pntMousePosition.X > rectRectangle.Location.X + rectRectangle.Size.Width)
                    return RectanglePositions.BelowRight;

                return RectanglePositions.Below;
            }
            else
            {
                // mouse is either left or right of the rectangle
                if (pntMousePosition.X < rectRectangle.Location.X)
                    return RectanglePositions.Left;

                // if it reaches here, the mouse is to the right
                return RectanglePositions.Right;
            }
        }

        /// <summary> Returns the pixel width of text for a given font </summary>
        /// <param name="strText"></param>
        /// <param name="fntTextFont"></param>
        /// <returns></returns>
        static public int TextPixelWidth(string strText, Font fntTextFont)
        {
            if (strText.Length == 0)
                return 0;

            // measure against a throwaway 1x1 bitmap; dispose both GDI+ objects when done
            using (Bitmap objBitmap = new Bitmap(1, 1))
            using (Graphics objGraphics = Graphics.FromImage(objBitmap))
            {
                return objGraphics.MeasureString(strText, fntTextFont).ToSize().Width;
            }
        }

        /// <summary> Returns the location on a control where a given body of text should be drawn.
        /// Assumes that the text fits on the control (does not factor over-width into calculations). </summary>
        /// <param name="objControl"></param>
        /// <param name="intPadding"></param>
        /// <param name="strText"></param>
        /// <param name="m_TextAlign"></param>
        /// <returns></returns>
        static public Point ControlTextLocation(Control objControl, int intPadding, string strText, System.Drawing.ContentAlignment m_TextAlign)
        {
            int intContentsLocationY = 0;
            int intContentsLocationX = 0;

            if (m_TextAlign == System.Drawing.ContentAlignment.TopLeft)
            {
                intContentsLocationX = intPadding;
                intContentsLocationY = intPadding;
            }
            else if (m_TextAlign == System.Drawing.ContentAlignment.MiddleLeft)
            {
                intContentsLocationX = intPadding;
                intContentsLocationY = objControl.Height/2 - objControl.Font.Height/2;
            }
            else if (m_TextAlign == System.Drawing.ContentAlignment.BottomLeft)
            {
                intContentsLocationX = intPadding;
                intContentsLocationY = objControl.Height - intPadding - objControl.Font.Height;
            }
            else if (m_TextAlign == System.Drawing.ContentAlignment.TopCenter)
            {
                intContentsLocationX = objControl.Width/2 - DrawingLib.TextPixelWidth(strText, objControl.Font)/2;
                intContentsLocationY = intPadding;
            }
            else if (m_TextAlign == System.Drawing.ContentAlignment.MiddleCenter)
            {
                intContentsLocationX = objControl.Width/2 - DrawingLib.TextPixelWidth(strText, objControl.Font)/2;
                intContentsLocationY = objControl.Height/2 - objControl.Font.Height/2;
            }
            else if (m_TextAlign == System.Drawing.ContentAlignment.BottomCenter)
            {
                intContentsLocationX = objControl.Width/2 - DrawingLib.TextPixelWidth(strText, objControl.Font)/2;
                intContentsLocationY = objControl.Height - intPadding - objControl.Font.Height;
            }
            else if (m_TextAlign == System.Drawing.ContentAlignment.TopRight)
            {
                intContentsLocationX = objControl.Width - intPadding - DrawingLib.TextPixelWidth(strText, objControl.Font);
                intContentsLocationY = intPadding;
            }
            else if (m_TextAlign == System.Drawing.ContentAlignment.MiddleRight)
            {
                intContentsLocationX = objControl.Width - intPadding - DrawingLib.TextPixelWidth(strText, objControl.Font);
                intContentsLocationY = objControl.Height/2 - objControl.Font.Height/2;
            }
            else if (m_TextAlign == System.Drawing.ContentAlignment.BottomRight)
            {
                intContentsLocationX = objControl.Width - intPadding - DrawingLib.TextPixelWidth(strText, objControl.Font);
                intContentsLocationY = objControl.Height - intPadding - objControl.Font.Height;
            }

            Point pntLocation = new Point(intContentsLocationX, intContentsLocationY);
            return pntLocation;
        }

        /// <summary> Truncates text so that, with the given padding, it fits within the given pixel width. </summary>
        /// <param name="strText"></param>
        /// <param name="fntTextFont"></param>
        /// <param name="intTextPadding"></param>
        /// <param name="intMaxWidth"></param>
        /// <returns></returns>
        static public string TruncateTextToFitPixelWidth(string strText, Font fntTextFont, int intTextPadding, int intMaxWidth)
        {
            if (strText.Length == 0)
                return "";

            // calculates how much text can be displayed on the button
            if (DrawingLib.TextPixelWidth(strText, fntTextFont) + intTextPadding > intMaxWidth)
            {
                while (DrawingLib.TextPixelWidth(strText, fntTextFont) + intTextPadding > intMaxWidth)
                {
                    strText = strText.Substring(0, strText.Length - 1);

                    // ensures that the loop can terminate if the text runs out
                    if (strText.Length == 0)
                        break;
                }
            }

            return strText;
        }

        /// <summary> Draws a bevelled outline around a rectangle </summary>
        /// <param name="rectDrawRectangle"></param>
        /// <param name="g"></param>
        /// <param name="clrBorderColor"></param>
        /// <param name="intBorderThickness"></param>
        static public void DrawBevelledOutlineOnRectangle(Rectangle rectDrawRectangle, Graphics g, Color clrBorderColor, int intBorderThickness)
        {
            // aborts method if an invalid border thickness is passed (checked before allocating GDI+ objects
            // so nothing leaks on the early return) - TODO - should probably throw an exception here instead
            if (intBorderThickness <= 0)
                return;

            // if (rectDrawRectangle.Height == 0 || rectDrawRectangle.Width == 0)
            //     return;

            Pen objPen = new Pen(clrBorderColor, intBorderThickness);
            Brush solidbrush = new SolidBrush(clrBorderColor);

            // when drawing a line thicker than one pixel, it is easier to draw it as a filled rectangle than an actual line
            Rectangle line;

            // need to get the location of the rectangle and factor this into where the outline is drawn. if drawing to the
            // graphics object without doing this, everything will start off at position (0,0) of the control from which the
            // graphics object was taken
            int intRctX = rectDrawRectangle.Location.X;
            int intRctY = rectDrawRectangle.Location.Y;

            // border thickness 1 is handled separately from border thickness > 1 - the former draws proper lines, the latter
            // draws rectangles as lines. note that a rectangle of pixel width 1 cannot be drawn - it always has a minimum
            // dimension of 2 because it has a line on all sides
            if (intBorderThickness == 1)
            {
                // top
                g.DrawLine(objPen, new Point(intRctX + 1, intRctY + 0), new Point(intRctX + rectDrawRectangle.Width - 2, intRctY + 0));

                // bottom
                g.DrawLine(objPen, new Point(intRctX + 1, intRctY + rectDrawRectangle.Height - 1), new Point(intRctX + rectDrawRectangle.Width - 2, intRctY + rectDrawRectangle.Height - 1));

                // left
                g.DrawLine(objPen, new Point(intRctX + 0, intRctY + 1), new Point(intRctX + 0, intRctY + rectDrawRectangle.Height - 2));

                // right
                g.DrawLine(objPen, new Point(intRctX + rectDrawRectangle.Width - 1, intRctY + 1), new Point(intRctX + rectDrawRectangle.Width - 1, intRctY + rectDrawRectangle.Height - 2));

                // top left bevel
                g.DrawLine(objPen, new Point(intRctX + 0, intRctY + 1), new Point(intRctX + 1, intRctY + 1));

                // top right bevel
                g.DrawLine(objPen, new Point(intRctX + rectDrawRectangle.Width - 2, intRctY + 1), new Point(intRctX + rectDrawRectangle.Width, intRctY + 1));

                // bottom left bevel
                g.DrawLine(objPen, new Point(intRctX + 0, intRctY + rectDrawRectangle.Height - 2), new Point(intRctX + 1, intRctY + rectDrawRectangle.Height - 2));

                // bottom right bevel
                g.DrawLine(objPen, new Point(intRctX + rectDrawRectangle.Width - 2, intRctY + rectDrawRectangle.Height - 2), new Point(intRctX + rectDrawRectangle.Width, intRctY + rectDrawRectangle.Height - 2));
            }
            else
            {
                // top
                line = new Rectangle(intRctX + intBorderThickness, intRctY + 0, rectDrawRectangle.Width - intBorderThickness*2, intBorderThickness);
                g.FillRectangle(solidbrush, line);

                // bottom
                line = new Rectangle(intRctX + intBorderThickness, intRctY + rectDrawRectangle.Height - intBorderThickness, rectDrawRectangle.Width - intBorderThickness*2, intBorderThickness);
                g.FillRectangle(solidbrush, line);

                // left
                line = new Rectangle(intRctX + 0, intRctY + intBorderThickness, intBorderThickness, rectDrawRectangle.Height - intBorderThickness*2);
                g.FillRectangle(solidbrush, line);

                // right
                line = new Rectangle(intRctX + rectDrawRectangle.Width - intBorderThickness, intRctY + intBorderThickness, intBorderThickness, rectDrawRectangle.Height - intBorderThickness*2);
                g.FillRectangle(solidbrush, line);

                // top left bevel
                line = new Rectangle(intRctX + intBorderThickness, intRctY + intBorderThickness, intBorderThickness, intBorderThickness);
                g.FillRectangle(solidbrush, line);

                // top right bevel
                line = new Rectangle(intRctX + rectDrawRectangle.Width - intBorderThickness*2, intRctY + intBorderThickness, intBorderThickness, intBorderThickness);
                g.FillRectangle(solidbrush, line);

                // bottom left bevel
                line = new Rectangle(intRctX + intBorderThickness, intRctY + rectDrawRectangle.Height - intBorderThickness*2, intBorderThickness, intBorderThickness);
                g.FillRectangle(solidbrush, line);

                // bottom right bevel
                line = new Rectangle(intRctX + rectDrawRectangle.Width - intBorderThickness*2, intRctY + rectDrawRectangle.Height - intBorderThickness*2, intBorderThickness, intBorderThickness);
                g.FillRectangle(solidbrush, line);
            }

            objPen.Dispose();
            solidbrush.Dispose();
        }

        /// <summary> Draws a square outline around a rectangle </summary>
        /// <param name="rctDrawRectangle"></param>
        /// <param name="g"></param>
        /// <param name="clrBorderColor"></param>
        /// <param name="intBorderThickness"></param>
        /// <param name="intPushIn"></param>
        static public void DrawBoxOutlineOnRectangle(Rectangle rctDrawRectangle, Graphics g, Color clrBorderColor, int intBorderThickness, int intPushIn)
        {
            if (intBorderThickness <= 0)
                return;

            // if (rctDrawRectangle.Height == 0 || rctDrawRectangle.Width == 0)
            //     return;

            Pen objPen = new Pen(clrBorderColor, intBorderThickness);
            Brush solidbrush = new SolidBrush(clrBorderColor);
            Rectangle line;
            int WidthCorrector = 0;

            // need to correct for width when not pushing in, ie, when drawing borders right up against the edge of the
            // control - borders at the bottom and right need to be pushed out by 1 if drawing there. when not drawing
            // right at the edge, no correction is needed
            if (intPushIn == 0)
                WidthCorrector = 1;

            if (intBorderThickness == 1)
            {
                // top
                g.DrawLine(objPen, new Point(0 + intPushIn, 0 + intPushIn), new Point(rctDrawRectangle.Width - intPushIn, 0 + intPushIn));

                // bottom
                g.DrawLine(objPen, new Point(0 + intPushIn, rctDrawRectangle.Height - WidthCorrector - intPushIn), new Point(rctDrawRectangle.Width - intPushIn, rctDrawRectangle.Height - WidthCorrector - intPushIn));

                // left
                g.DrawLine(objPen, new Point(0 + intPushIn, 0 + intPushIn), new Point(0 + intPushIn, rctDrawRectangle.Height - intPushIn));

                // right
                g.DrawLine(objPen, new Point(rctDrawRectangle.Width - WidthCorrector - intPushIn, 0 + intPushIn), new Point(rctDrawRectangle.Width - WidthCorrector - intPushIn, rctDrawRectangle.Height - intPushIn));
            }
            else
            {
                // top
                line = new Rectangle(0 + intPushIn, 0 + intPushIn, rctDrawRectangle.Width - intPushIn*2, intBorderThickness);
                g.FillRectangle(solidbrush, line);

                // bottom
                line = new Rectangle(0 + intPushIn, rctDrawRectangle.Height - intBorderThickness - intPushIn, rctDrawRectangle.Width - intPushIn*2, intBorderThickness);
                g.FillRectangle(solidbrush, line);

                // left
                line = new Rectangle(0 + intPushIn, 0 + intPushIn, intBorderThickness, rctDrawRectangle.Height - intPushIn*2);
                g.FillRectangle(solidbrush, line);

                // right
                line = new Rectangle(rctDrawRectangle.Width - intBorderThickness - intPushIn, 0 + intPushIn, intBorderThickness, rctDrawRectangle.Height - intPushIn*2);
                g.FillRectangle(solidbrush, line);
            }

            objPen.Dispose();
            solidbrush.Dispose();
        }

        /// <summary> Returns a font for the given XmlNode of style info </summary>
        /// <param name="nXmlStyleData"></param>
        /// <returns></returns>
        static public Font FontFromStyleXml(XmlNode nXmlStyleData)
        {
            bool blnApplyFontStyle = false;
            Font returnFont = null;

            if (nXmlStyleData.SelectSingleNode(".//fontStyle") != null)
                blnApplyFontStyle = true;

            if (!blnApplyFontStyle)
            {
                // applies font and font size
                returnFont = new Font(
                    nXmlStyleData.SelectSingleNode(".//fontFamily").InnerText,
                    Convert.ToSingle(nXmlStyleData.SelectSingleNode(".//fontSize").InnerText));
            }
            else
            {
                // default - applies font, font size and font style
                returnFont = new Font(
                    nXmlStyleData.SelectSingleNode(".//fontFamily").InnerText,
                    Convert.ToSingle(nXmlStyleData.SelectSingleNode(".//fontSize").InnerText),
                    (FontStyle)Enum.Parse(typeof(FontStyle), nXmlStyleData.SelectSingleNode(".//fontStyle").InnerText));
            }

            return returnFont;
        }
    }
}
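// ---------------------------------------------------------------------------------
// Usage sketch (not part of vcFramework): a minimal WinForms control that exercises
// DrawingLib. The "OutlinedLabel" class and its padding/thickness values are
// illustrative assumptions, not library code.
// ---------------------------------------------------------------------------------
using System.Drawing;
using System.Windows.Forms;
using vcFramework.Drawing;

namespace vcFramework.Examples
{
    public class OutlinedLabel : Control
    {
        private const int TextPadding = 4;

        protected override void OnPaint(PaintEventArgs e)
        {
            base.OnPaint(e);

            // truncate the caption so it fits inside the control, then centre it
            string caption = DrawingLib.TruncateTextToFitPixelWidth(Text, Font, TextPadding * 2, Width);
            Point location = DrawingLib.ControlTextLocation(this, TextPadding, caption, ContentAlignment.MiddleCenter);

            using (Brush textBrush = new SolidBrush(ForeColor))
                e.Graphics.DrawString(caption, Font, textBrush, location);

            // draw a 2px box border flush against the control edge (intPushIn = 0)
            DrawingLib.DrawBoxOutlineOnRectangle(ClientRectangle, e.Graphics, Color.DimGray, 2, 0);
        }
    }
}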