#region Copyright notice and license // Copyright 2015 gRPC authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #endregion using System; using System.Collections.Generic; using System.Linq; using System.Threading; using System.Threading.Tasks; using Grpc.Core.Internal; using Grpc.Core.Logging; using Grpc.Core.Utils; namespace Grpc.Core.Internal { internal interface IServerCallHandler { Task HandleCall(ServerRpcNew newRpc, CompletionQueueSafeHandle cq); } internal class UnaryServerCallHandler<TRequest, TResponse> : IServerCallHandler where TRequest : class where TResponse : class { static readonly ILogger Logger = GrpcEnvironment.Logger.ForType<UnaryServerCallHandler<TRequest, TResponse>>(); readonly Method<TRequest, TResponse> method; readonly UnaryServerMethod<TRequest, TResponse> handler; public UnaryServerCallHandler(Method<TRequest, TResponse> method, UnaryServerMethod<TRequest, TResponse> handler) { this.method = method; this.handler = handler; } public async Task HandleCall(ServerRpcNew newRpc, CompletionQueueSafeHandle cq) { var asyncCall = new AsyncCallServer<TRequest, TResponse>( method.ResponseMarshaller.Serializer, method.RequestMarshaller.Deserializer, newRpc.Server); asyncCall.Initialize(newRpc.Call, cq); var finishedTask = asyncCall.ServerSideCallAsync(); var requestStream = new ServerRequestStream<TRequest, TResponse>(asyncCall); var responseStream = new ServerResponseStream<TRequest, TResponse>(asyncCall); Status status; Tuple<TResponse,WriteFlags> responseTuple = null; var context = HandlerUtils.NewContext(newRpc, responseStream, asyncCall.CancellationToken); try { GrpcPreconditions.CheckArgument(await requestStream.MoveNext().ConfigureAwait(false)); var request = requestStream.Current; var response = await handler(request, context).ConfigureAwait(false); status = context.Status; responseTuple = Tuple.Create(response, HandlerUtils.GetWriteFlags(context.WriteOptions)); } catch (Exception e) { if (!(e is RpcException)) { Logger.Warning(e, "Exception occured in handler."); } status = HandlerUtils.GetStatusFromExceptionAndMergeTrailers(e, context.ResponseTrailers); } try { await asyncCall.SendStatusFromServerAsync(status, context.ResponseTrailers, responseTuple).ConfigureAwait(false); } catch (Exception) { asyncCall.Cancel(); throw; } await finishedTask.ConfigureAwait(false); } } internal class ServerStreamingServerCallHandler<TRequest, TResponse> : IServerCallHandler where TRequest : class where TResponse : class { static readonly ILogger Logger = GrpcEnvironment.Logger.ForType<ServerStreamingServerCallHandler<TRequest, TResponse>>(); readonly Method<TRequest, TResponse> method; readonly ServerStreamingServerMethod<TRequest, TResponse> handler; public ServerStreamingServerCallHandler(Method<TRequest, TResponse> method, ServerStreamingServerMethod<TRequest, TResponse> handler) { this.method = method; this.handler = handler; } public async Task HandleCall(ServerRpcNew newRpc, CompletionQueueSafeHandle cq) { var asyncCall = new AsyncCallServer<TRequest, TResponse>( 
method.ResponseMarshaller.Serializer, method.RequestMarshaller.Deserializer, newRpc.Server); asyncCall.Initialize(newRpc.Call, cq); var finishedTask = asyncCall.ServerSideCallAsync(); var requestStream = new ServerRequestStream<TRequest, TResponse>(asyncCall); var responseStream = new ServerResponseStream<TRequest, TResponse>(asyncCall); Status status; var context = HandlerUtils.NewContext(newRpc, responseStream, asyncCall.CancellationToken); try { GrpcPreconditions.CheckArgument(await requestStream.MoveNext().ConfigureAwait(false)); var request = requestStream.Current; await handler(request, responseStream, context).ConfigureAwait(false); status = context.Status; } catch (Exception e) { if (!(e is RpcException)) { Logger.Warning(e, "Exception occured in handler."); } status = HandlerUtils.GetStatusFromExceptionAndMergeTrailers(e, context.ResponseTrailers); } try { await asyncCall.SendStatusFromServerAsync(status, context.ResponseTrailers, null).ConfigureAwait(false); } catch (Exception) { asyncCall.Cancel(); throw; } await finishedTask.ConfigureAwait(false); } } internal class ClientStreamingServerCallHandler<TRequest, TResponse> : IServerCallHandler where TRequest : class where TResponse : class { static readonly ILogger Logger = GrpcEnvironment.Logger.ForType<ClientStreamingServerCallHandler<TRequest, TResponse>>(); readonly Method<TRequest, TResponse> method; readonly ClientStreamingServerMethod<TRequest, TResponse> handler; public ClientStreamingServerCallHandler(Method<TRequest, TResponse> method, ClientStreamingServerMethod<TRequest, TResponse> handler) { this.method = method; this.handler = handler; } public async Task HandleCall(ServerRpcNew newRpc, CompletionQueueSafeHandle cq) { var asyncCall = new AsyncCallServer<TRequest, TResponse>( method.ResponseMarshaller.Serializer, method.RequestMarshaller.Deserializer, newRpc.Server); asyncCall.Initialize(newRpc.Call, cq); var finishedTask = asyncCall.ServerSideCallAsync(); var requestStream = new ServerRequestStream<TRequest, TResponse>(asyncCall); var responseStream = new ServerResponseStream<TRequest, TResponse>(asyncCall); Status status; Tuple<TResponse,WriteFlags> responseTuple = null; var context = HandlerUtils.NewContext(newRpc, responseStream, asyncCall.CancellationToken); try { var response = await handler(requestStream, context).ConfigureAwait(false); status = context.Status; responseTuple = Tuple.Create(response, HandlerUtils.GetWriteFlags(context.WriteOptions)); } catch (Exception e) { if (!(e is RpcException)) { Logger.Warning(e, "Exception occured in handler."); } status = HandlerUtils.GetStatusFromExceptionAndMergeTrailers(e, context.ResponseTrailers); } try { await asyncCall.SendStatusFromServerAsync(status, context.ResponseTrailers, responseTuple).ConfigureAwait(false); } catch (Exception) { asyncCall.Cancel(); throw; } await finishedTask.ConfigureAwait(false); } } internal class DuplexStreamingServerCallHandler<TRequest, TResponse> : IServerCallHandler where TRequest : class where TResponse : class { static readonly ILogger Logger = GrpcEnvironment.Logger.ForType<DuplexStreamingServerCallHandler<TRequest, TResponse>>(); readonly Method<TRequest, TResponse> method; readonly DuplexStreamingServerMethod<TRequest, TResponse> handler; public DuplexStreamingServerCallHandler(Method<TRequest, TResponse> method, DuplexStreamingServerMethod<TRequest, TResponse> handler) { this.method = method; this.handler = handler; } public async Task HandleCall(ServerRpcNew newRpc, CompletionQueueSafeHandle cq) { var asyncCall = new 
AsyncCallServer<TRequest, TResponse>( method.ResponseMarshaller.Serializer, method.RequestMarshaller.Deserializer, newRpc.Server); asyncCall.Initialize(newRpc.Call, cq); var finishedTask = asyncCall.ServerSideCallAsync(); var requestStream = new ServerRequestStream<TRequest, TResponse>(asyncCall); var responseStream = new ServerResponseStream<TRequest, TResponse>(asyncCall); Status status; var context = HandlerUtils.NewContext(newRpc, responseStream, asyncCall.CancellationToken); try { await handler(requestStream, responseStream, context).ConfigureAwait(false); status = context.Status; } catch (Exception e) { if (!(e is RpcException)) { Logger.Warning(e, "Exception occured in handler."); } status = HandlerUtils.GetStatusFromExceptionAndMergeTrailers(e, context.ResponseTrailers); } try { await asyncCall.SendStatusFromServerAsync(status, context.ResponseTrailers, null).ConfigureAwait(false); } catch (Exception) { asyncCall.Cancel(); throw; } await finishedTask.ConfigureAwait(false); } } internal class UnimplementedMethodCallHandler : IServerCallHandler { public static readonly UnimplementedMethodCallHandler Instance = new UnimplementedMethodCallHandler(); DuplexStreamingServerCallHandler<byte[], byte[]> callHandlerImpl; public UnimplementedMethodCallHandler() { var marshaller = new Marshaller<byte[]>((payload) => payload, (payload) => payload); var method = new Method<byte[], byte[]>(MethodType.DuplexStreaming, "", "", marshaller, marshaller); this.callHandlerImpl = new DuplexStreamingServerCallHandler<byte[], byte[]>(method, new DuplexStreamingServerMethod<byte[], byte[]>(UnimplementedMethod)); } /// <summary> /// Handler used for unimplemented method. /// </summary> private Task UnimplementedMethod(IAsyncStreamReader<byte[]> requestStream, IServerStreamWriter<byte[]> responseStream, ServerCallContext ctx) { ctx.Status = new Status(StatusCode.Unimplemented, ""); return TaskUtils.CompletedTask; } public Task HandleCall(ServerRpcNew newRpc, CompletionQueueSafeHandle cq) { return callHandlerImpl.HandleCall(newRpc, cq); } } internal static class HandlerUtils { public static Status GetStatusFromExceptionAndMergeTrailers(Exception e, Metadata callContextResponseTrailers) { var rpcException = e as RpcException; if (rpcException != null) { // There are two sources of metadata entries on the server-side: // 1. serverCallContext.ResponseTrailers // 2. trailers in RpcException thrown by user code in server side handler. // As metadata allows duplicate keys, the logical thing to do is // to just merge trailers from RpcException into serverCallContext.ResponseTrailers. foreach (var entry in rpcException.Trailers) { callContextResponseTrailers.Add(entry); } // use the status thrown by handler. return rpcException.Status; } return new Status(StatusCode.Unknown, "Exception was thrown by handler."); } public static WriteFlags GetWriteFlags(WriteOptions writeOptions) { return writeOptions != null ? writeOptions.Flags : default(WriteFlags); } public static ServerCallContext NewContext<TRequest, TResponse>(ServerRpcNew newRpc, ServerResponseStream<TRequest, TResponse> serverResponseStream, CancellationToken cancellationToken) where TRequest : class where TResponse : class { DateTime realtimeDeadline = newRpc.Deadline.ToClockType(ClockType.Realtime).ToDateTime(); return new ServerCallContext(newRpc.Call, newRpc.Method, newRpc.Host, realtimeDeadline, newRpc.RequestMetadata, cancellationToken, serverResponseStream.WriteResponseHeadersAsync, serverResponseStream); } } }
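// --- Illustrative sketch (not part of the file above) ---------------------------------
// A minimal, self-contained example of the rule HandlerUtils.GetStatusFromExceptionAndMergeTrailers
// implements: trailers carried by an RpcException are appended to the call's response trailers
// (Metadata allows duplicate keys, so a plain append is correct), the exception's Status is used
// as-is, and any other exception type maps to StatusCode.Unknown so internal details are not
// leaked to the client. Only public Grpc.Core types are used; the handler and trailer values are
// made up for the example.
using System;
using Grpc.Core;

static class TrailerMergeSketch
{
    public static Status MergeAndGetStatus(Exception e, Metadata responseTrailers)
    {
        var rpcException = e as RpcException;
        if (rpcException != null)
        {
            // Metadata allows duplicate keys, so merging is a simple append.
            foreach (var entry in rpcException.Trailers)
            {
                responseTrailers.Add(entry);
            }
            // Use the status the handler chose to surface.
            return rpcException.Status;
        }
        // Arbitrary exceptions are reported generically.
        return new Status(StatusCode.Unknown, "Exception was thrown by handler.");
    }

    static void Main()
    {
        var contextTrailers = new Metadata { { "request-id", "42" } };
        var thrown = new RpcException(
            new Status(StatusCode.NotFound, "no such record"),
            new Metadata { { "hint", "check the id" } });

        Status status = MergeAndGetStatus(thrown, contextTrailers);
        // Prints "NotFound" and a trailer count of 2 (the original entry plus the merged one).
        Console.WriteLine(status.StatusCode + " / " + contextTrailers.Count + " trailer entries");
    }
}
// ---------------------------------------------------------------------------------------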
using System; using System.Globalization; using System.Collections.Generic; using Sasoma.Utils; using Sasoma.Microdata.Interfaces; using Sasoma.Languages.Core; using Sasoma.Microdata.Properties; namespace Sasoma.Microdata.Types { /// <summary> /// An article, such as a news article or piece of investigative report. Newspapers and magazines have articles of many different types and this is intended to cover them all. /// </summary> public class Article_Core : TypeCore, ICreativeWork { public Article_Core() { this._TypeId = 20; this._Id = "Article"; this._Schema_Org_Url = "http://schema.org/Article"; string label = ""; GetLabel(out label, "Article", typeof(Article_Core)); this._Label = label; this._Ancestors = new int[]{266,78}; this._SubTypes = new int[]{39,184,233}; this._SuperTypes = new int[]{78}; this._Properties = new int[]{67,108,143,229,0,2,10,12,18,20,24,26,21,50,51,54,57,58,59,61,62,64,70,72,81,97,100,110,115,116,126,138,151,178,179,180,199,211,219,230,231,15,16,235}; } /// <summary> /// The subject matter of the content. /// </summary> private About_Core about; public About_Core About { get { return about; } set { about = value; SetPropertyInstance(about); } } /// <summary> /// Specifies the Person that is legally accountable for the CreativeWork. /// </summary> private AccountablePerson_Core accountablePerson; public AccountablePerson_Core AccountablePerson { get { return accountablePerson; } set { accountablePerson = value; SetPropertyInstance(accountablePerson); } } /// <summary> /// The overall rating, based on a collection of reviews or ratings, of the item. /// </summary> private Properties.AggregateRating_Core aggregateRating; public Properties.AggregateRating_Core AggregateRating { get { return aggregateRating; } set { aggregateRating = value; SetPropertyInstance(aggregateRating); } } /// <summary> /// A secondary title of the CreativeWork. /// </summary> private AlternativeHeadline_Core alternativeHeadline; public AlternativeHeadline_Core AlternativeHeadline { get { return alternativeHeadline; } set { alternativeHeadline = value; SetPropertyInstance(alternativeHeadline); } } /// <summary> /// The actual body of the article. /// </summary> private ArticleBody_Core articleBody; public ArticleBody_Core ArticleBody { get { return articleBody; } set { articleBody = value; SetPropertyInstance(articleBody); } } /// <summary> /// Articles may belong to one or more 'sections' in a magazine or newspaper, such as Sports, Lifestyle, etc. /// </summary> private ArticleSection_Core articleSection; public ArticleSection_Core ArticleSection { get { return articleSection; } set { articleSection = value; SetPropertyInstance(articleSection); } } /// <summary> /// The media objects that encode this creative work. This property is a synonym for encodings. /// </summary> private AssociatedMedia_Core associatedMedia; public AssociatedMedia_Core AssociatedMedia { get { return associatedMedia; } set { associatedMedia = value; SetPropertyInstance(associatedMedia); } } /// <summary> /// An embedded audio object. /// </summary> private Audio_Core audio; public Audio_Core Audio { get { return audio; } set { audio = value; SetPropertyInstance(audio); } } /// <summary> /// The author of this content. Please note that author is special in that HTML 5 provides a special mechanism for indicating authorship via the rel tag. That is equivalent to this and may be used interchangabely. 
/// </summary> private Author_Core author; public Author_Core Author { get { return author; } set { author = value; SetPropertyInstance(author); } } /// <summary> /// Awards won by this person or for this creative work. /// </summary> private Awards_Core awards; public Awards_Core Awards { get { return awards; } set { awards = value; SetPropertyInstance(awards); } } /// <summary> /// Comments, typically from users, on this CreativeWork. /// </summary> private Comment_Core comment; public Comment_Core Comment { get { return comment; } set { comment = value; SetPropertyInstance(comment); } } /// <summary> /// The location of the content. /// </summary> private ContentLocation_Core contentLocation; public ContentLocation_Core ContentLocation { get { return contentLocation; } set { contentLocation = value; SetPropertyInstance(contentLocation); } } /// <summary> /// Official rating of a piece of content\u2014for example,'MPAA PG-13'. /// </summary> private ContentRating_Core contentRating; public ContentRating_Core ContentRating { get { return contentRating; } set { contentRating = value; SetPropertyInstance(contentRating); } } /// <summary> /// A secondary contributor to the CreativeWork. /// </summary> private Contributor_Core contributor; public Contributor_Core Contributor { get { return contributor; } set { contributor = value; SetPropertyInstance(contributor); } } /// <summary> /// The party holding the legal copyright to the CreativeWork. /// </summary> private CopyrightHolder_Core copyrightHolder; public CopyrightHolder_Core CopyrightHolder { get { return copyrightHolder; } set { copyrightHolder = value; SetPropertyInstance(copyrightHolder); } } /// <summary> /// The year during which the claimed copyright for the CreativeWork was first asserted. /// </summary> private CopyrightYear_Core copyrightYear; public CopyrightYear_Core CopyrightYear { get { return copyrightYear; } set { copyrightYear = value; SetPropertyInstance(copyrightYear); } } /// <summary> /// The creator/author of this CreativeWork or UserComments. This is the same as the Author property for CreativeWork. /// </summary> private Creator_Core creator; public Creator_Core Creator { get { return creator; } set { creator = value; SetPropertyInstance(creator); } } /// <summary> /// The date on which the CreativeWork was created. /// </summary> private DateCreated_Core dateCreated; public DateCreated_Core DateCreated { get { return dateCreated; } set { dateCreated = value; SetPropertyInstance(dateCreated); } } /// <summary> /// The date on which the CreativeWork was most recently modified. /// </summary> private DateModified_Core dateModified; public DateModified_Core DateModified { get { return dateModified; } set { dateModified = value; SetPropertyInstance(dateModified); } } /// <summary> /// Date of first broadcast/publication. /// </summary> private DatePublished_Core datePublished; public DatePublished_Core DatePublished { get { return datePublished; } set { datePublished = value; SetPropertyInstance(datePublished); } } /// <summary> /// A short description of the item. /// </summary> private Description_Core description; public Description_Core Description { get { return description; } set { description = value; SetPropertyInstance(description); } } /// <summary> /// A link to the page containing the comments of the CreativeWork. 
/// </summary> private DiscussionURL_Core discussionURL; public DiscussionURL_Core DiscussionURL { get { return discussionURL; } set { discussionURL = value; SetPropertyInstance(discussionURL); } } /// <summary> /// Specifies the Person who edited the CreativeWork. /// </summary> private Editor_Core editor; public Editor_Core Editor { get { return editor; } set { editor = value; SetPropertyInstance(editor); } } /// <summary> /// The media objects that encode this creative work /// </summary> private Encodings_Core encodings; public Encodings_Core Encodings { get { return encodings; } set { encodings = value; SetPropertyInstance(encodings); } } /// <summary> /// Genre of the creative work /// </summary> private Genre_Core genre; public Genre_Core Genre { get { return genre; } set { genre = value; SetPropertyInstance(genre); } } /// <summary> /// Headline of the article /// </summary> private Headline_Core headline; public Headline_Core Headline { get { return headline; } set { headline = value; SetPropertyInstance(headline); } } /// <summary> /// URL of an image of the item. /// </summary> private Image_Core image; public Image_Core Image { get { return image; } set { image = value; SetPropertyInstance(image); } } /// <summary> /// The language of the content. please use one of the language codes from the <a href=\http://tools.ietf.org/html/bcp47\>IETF BCP 47 standard.</a> /// </summary> private InLanguage_Core inLanguage; public InLanguage_Core InLanguage { get { return inLanguage; } set { inLanguage = value; SetPropertyInstance(inLanguage); } } /// <summary> /// A count of a specific user interactions with this item\u2014for example, <code>20 UserLikes</code>, <code>5 UserComments</code>, or <code>300 UserDownloads</code>. The user interaction type should be one of the sub types of <a href=\http://schema.org/UserInteraction\>UserInteraction</a>. /// </summary> private InteractionCount_Core interactionCount; public InteractionCount_Core InteractionCount { get { return interactionCount; } set { interactionCount = value; SetPropertyInstance(interactionCount); } } /// <summary> /// Indicates whether this content is family friendly. /// </summary> private IsFamilyFriendly_Core isFamilyFriendly; public IsFamilyFriendly_Core IsFamilyFriendly { get { return isFamilyFriendly; } set { isFamilyFriendly = value; SetPropertyInstance(isFamilyFriendly); } } /// <summary> /// The keywords/tags used to describe this content. /// </summary> private Keywords_Core keywords; public Keywords_Core Keywords { get { return keywords; } set { keywords = value; SetPropertyInstance(keywords); } } /// <summary> /// Indicates that the CreativeWork contains a reference to, but is not necessarily about a concept. /// </summary> private Mentions_Core mentions; public Mentions_Core Mentions { get { return mentions; } set { mentions = value; SetPropertyInstance(mentions); } } /// <summary> /// The name of the item. /// </summary> private Name_Core name; public Name_Core Name { get { return name; } set { name = value; SetPropertyInstance(name); } } /// <summary> /// An offer to sell this item\u2014for example, an offer to sell a product, the DVD of a movie, or tickets to an event. /// </summary> private Offers_Core offers; public Offers_Core Offers { get { return offers; } set { offers = value; SetPropertyInstance(offers); } } /// <summary> /// Specifies the Person or Organization that distributed the CreativeWork. 
/// </summary> private Provider_Core provider; public Provider_Core Provider { get { return provider; } set { provider = value; SetPropertyInstance(provider); } } /// <summary> /// The publisher of the creative work. /// </summary> private Publisher_Core publisher; public Publisher_Core Publisher { get { return publisher; } set { publisher = value; SetPropertyInstance(publisher); } } /// <summary> /// Link to page describing the editorial principles of the organization primarily responsible for the creation of the CreativeWork. /// </summary> private PublishingPrinciples_Core publishingPrinciples; public PublishingPrinciples_Core PublishingPrinciples { get { return publishingPrinciples; } set { publishingPrinciples = value; SetPropertyInstance(publishingPrinciples); } } /// <summary> /// Review of the item. /// </summary> private Reviews_Core reviews; public Reviews_Core Reviews { get { return reviews; } set { reviews = value; SetPropertyInstance(reviews); } } /// <summary> /// The Organization on whose behalf the creator was working. /// </summary> private SourceOrganization_Core sourceOrganization; public SourceOrganization_Core SourceOrganization { get { return sourceOrganization; } set { sourceOrganization = value; SetPropertyInstance(sourceOrganization); } } /// <summary> /// A thumbnail image relevant to the Thing. /// </summary> private ThumbnailURL_Core thumbnailURL; public ThumbnailURL_Core ThumbnailURL { get { return thumbnailURL; } set { thumbnailURL = value; SetPropertyInstance(thumbnailURL); } } /// <summary> /// URL of the item. /// </summary> private Properties.URL_Core uRL; public Properties.URL_Core URL { get { return uRL; } set { uRL = value; SetPropertyInstance(uRL); } } /// <summary> /// The version of the CreativeWork embodied by a specified resource. /// </summary> private Version_Core version; public Version_Core Version { get { return version; } set { version = value; SetPropertyInstance(version); } } /// <summary> /// An embedded video object. /// </summary> private Video_Core video; public Video_Core Video { get { return video; } set { video = value; SetPropertyInstance(video); } } /// <summary> /// The number of words in the text of the Article. /// </summary> private WordCount_Core wordCount; public WordCount_Core WordCount { get { return wordCount; } set { wordCount = value; SetPropertyInstance(wordCount); } } } }
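// --- Illustrative sketch (not part of the file above) ---------------------------------
// Every setter in Article_Core follows the same pattern: store the backing field, then call
// SetPropertyInstance(value). The TypeCore base class is not shown here, so the sketch below
// uses hypothetical stand-in types (TypeCoreSketch/ArticleSketch, not Sasoma API) and assumes
// the call is bookkeeping that records which schema.org properties have actually been populated.
using System;
using System.Collections.Generic;
using System.Linq;

abstract class TypeCoreSketch
{
    readonly List<object> populated = new List<object>();

    // Stand-in for SetPropertyInstance(...): remember each property value that gets assigned.
    protected void SetPropertyInstance(object value)
    {
        if (value != null)
        {
            populated.Add(value);
        }
    }

    public IEnumerable<object> PopulatedProperties
    {
        get { return populated; }
    }
}

class ArticleSketch : TypeCoreSketch
{
    string headline;
    public string Headline
    {
        get { return headline; }
        set { headline = value; SetPropertyInstance(headline); }
    }
}

static class ArticleSketchDemo
{
    static void Main()
    {
        var article = new ArticleSketch { Headline = "Example headline" };
        Console.WriteLine(article.PopulatedProperties.Count()); // 1
    }
}
// ---------------------------------------------------------------------------------------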
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // // System.Drawing.PrintDocument.cs // // Authors: // Dennis Hayes ([email protected]) // Herve Poussineau ([email protected]) // Andreas Nahr ([email protected]) // // (C) 2002 Ximian, Inc // // // Copyright (C) 2004 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.ComponentModel; namespace System.Drawing.Printing { public class PrintDocument : System.ComponentModel.Component { private PageSettings defaultpagesettings; private PrinterSettings printersettings; private PrintController printcontroller; private string documentname; private bool originAtMargins = false; // .NET V1.1 Beta public PrintDocument() { documentname = "document"; //offical default. printersettings = new PrinterSettings(); // use default values defaultpagesettings = (PageSettings)printersettings.DefaultPageSettings.Clone(); printcontroller = new StandardPrintController(); } // properties [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)] [Browsable(false)] [SRDescription("The settings for the current page.")] public PageSettings DefaultPageSettings { get { return defaultpagesettings; } set { defaultpagesettings = value; } } // Name of the document, not the file! [DefaultValue("document")] [SRDescription("The name of the document.")] public string DocumentName { get { return documentname; } set { documentname = value; } } [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)] [Browsable(false)] [SRDescription("The print controller object.")] public PrintController PrintController { get { return printcontroller; } set { printcontroller = value; } } [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)] [Browsable(false)] [SRDescription("The current settings for the active printer.")] public PrinterSettings PrinterSettings { get { return printersettings; } set { printersettings = value == null ? 
new PrinterSettings() : value; } } [DefaultValue(false)] [SRDescription("Determines if the origin is set at the specified margins.")] public bool OriginAtMargins { get { return originAtMargins; } set { originAtMargins = value; } } // methods public void Print() { PrintEventArgs printArgs = new PrintEventArgs(); this.OnBeginPrint(printArgs); if (printArgs.Cancel) return; PrintController.OnStartPrint(this, printArgs); if (printArgs.Cancel) return; Graphics g = null; if (printArgs.GraphicsContext != null) { g = Graphics.FromHdc(printArgs.GraphicsContext.Hdc); printArgs.GraphicsContext.Graphics = g; } // while there are more pages PrintPageEventArgs printPageArgs; do { QueryPageSettingsEventArgs queryPageSettingsArgs = new QueryPageSettingsEventArgs( DefaultPageSettings.Clone() as PageSettings); OnQueryPageSettings(queryPageSettingsArgs); PageSettings pageSettings = queryPageSettingsArgs.PageSettings; printPageArgs = new PrintPageEventArgs( g, pageSettings.Bounds, new Rectangle(0, 0, pageSettings.PaperSize.Width, pageSettings.PaperSize.Height), pageSettings); // TODO: We should create a graphics context for each page since they can have diferent paper // size, orientation, etc. We use a single graphic for now to keep Cairo using a single PDF file. printPageArgs.GraphicsContext = printArgs.GraphicsContext; Graphics pg = PrintController.OnStartPage(this, printPageArgs); // assign Graphics in printPageArgs printPageArgs.SetGraphics(pg); if (!printPageArgs.Cancel) this.OnPrintPage(printPageArgs); PrintController.OnEndPage(this, printPageArgs); if (printPageArgs.Cancel) break; } while (printPageArgs.HasMorePages); this.OnEndPrint(printArgs); PrintController.OnEndPrint(this, printArgs); } public override string ToString() { return "[PrintDocument " + this.DocumentName + "]"; } // events protected virtual void OnBeginPrint(PrintEventArgs e) { //fire the event if (BeginPrint != null) BeginPrint(this, e); } protected virtual void OnEndPrint(PrintEventArgs e) { //fire the event if (EndPrint != null) EndPrint(this, e); } protected virtual void OnPrintPage(PrintPageEventArgs e) { //fire the event if (PrintPage != null) PrintPage(this, e); } protected virtual void OnQueryPageSettings(QueryPageSettingsEventArgs e) { //fire the event if (QueryPageSettings != null) QueryPageSettings(this, e); } [SRDescription("Raised when printing begins")] public event PrintEventHandler BeginPrint; [SRDescription("Raised when printing ends")] public event PrintEventHandler EndPrint; [SRDescription("Raised when printing of a new page begins")] public event PrintPageEventHandler PrintPage; [SRDescription("Raised before printing of a new page begins")] public event QueryPageSettingsEventHandler QueryPageSettings; } }
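// --- Illustrative sketch (not part of the file above) ---------------------------------
// A minimal caller-side sketch of the event-driven flow that Print() drives: BeginPrint,
// then QueryPageSettings/PrintPage per page (the do/while loop above), then EndPrint.
// The text, font, and document name are arbitrary example values.
using System.Drawing;
using System.Drawing.Printing;

class PrintDocumentUsageSketch
{
    static void Main()
    {
        var document = new PrintDocument();
        document.DocumentName = "Example report";

        document.PrintPage += (sender, e) =>
        {
            using (var font = new Font("Arial", 12))
            {
                // Draw inside the page margins supplied by the current page settings.
                e.Graphics.DrawString("Hello from PrintDocument", font,
                    Brushes.Black, e.MarginBounds.Location);
            }
            // Leaving HasMorePages false ends the page loop after this page.
            e.HasMorePages = false;
        };

        // Sends the page(s) to the configured PrintController/printer.
        document.Print();
    }
}
// ---------------------------------------------------------------------------------------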
// // SourceManager.cs // // Author: // Aaron Bockover <[email protected]> // // Copyright (C) 2005-2007 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections.Generic; using Mono.Unix; using Mono.Addins; using Hyena; using Banshee.ServiceStack; using Banshee.Library; namespace Banshee.Sources { public delegate void SourceEventHandler(SourceEventArgs args); public delegate void SourceAddedHandler(SourceAddedArgs args); public class SourceEventArgs : EventArgs { public Source Source; } public class SourceAddedArgs : SourceEventArgs { public int Position; } public class SourceManager : /*ISourceManager,*/ IInitializeService, IRequiredService, IDBusExportable, IDisposable { private List<Source> sources = new List<Source>(); private List<GroupSource> group_sources = new List<GroupSource> (); private Dictionary<string, Source> extension_sources = new Dictionary<string, Source> (); private Source active_source; private Source default_source; private MusicLibrarySource music_library; private VideoLibrarySource video_library; public event SourceEventHandler SourceUpdated; public event SourceAddedHandler SourceAdded; public event SourceEventHandler SourceRemoved; public event SourceEventHandler ActiveSourceChanged; public class GroupSource : Source { public GroupSource (string name, int order) : base (name, name, order) { TypeUniqueId = order.ToString (); } } public void Initialize () { // TODO should add library sources here, but requires changing quite a few // things that depend on being loaded before the music library is added. 
//AddSource (music_library = new MusicLibrarySource (), true); //AddSource (video_library = new VideoLibrarySource (), false); group_sources.Add (new GroupSource (Catalog.GetString ("Online Media"), 60)); group_sources.Add (new GroupSource (Catalog.GetString ("Libraries"), 39)); } internal void LoadExtensionSources () { lock (this) { AddinManager.AddExtensionNodeHandler ("/Banshee/SourceManager/Source", OnExtensionChanged); } } public void Dispose () { lock (this) { try { AddinManager.RemoveExtensionNodeHandler ("/Banshee/SourceManager/Source", OnExtensionChanged); } catch {} active_source = null; default_source = null; music_library = null; video_library = null; // Do dispose extension sources foreach (Source source in extension_sources.Values) { RemoveSource (source, true); } // But do not dispose non-extension sources while (sources.Count > 0) { RemoveSource (sources[0], false); } sources.Clear (); extension_sources.Clear (); } } private void OnExtensionChanged (object o, ExtensionNodeEventArgs args) { lock (this) { TypeExtensionNode node = (TypeExtensionNode)args.ExtensionNode; if (args.Change == ExtensionChange.Add && !extension_sources.ContainsKey (node.Id)) { try { Source source = (Source)node.CreateInstance (); extension_sources.Add (node.Id, source); if (source.Properties.Get<bool> ("AutoAddSource", true)) { AddSource (source); } Log.DebugFormat ("Extension source loaded: {0}", source.Name); } catch {} } else if (args.Change == ExtensionChange.Remove && extension_sources.ContainsKey (node.Id)) { Source source = extension_sources[node.Id]; extension_sources.Remove (node.Id); RemoveSource (source, true); Log.DebugFormat ("Extension source unloaded: {0}", source.Name); } } } public void AddSource(Source source) { AddSource(source, false); } public void AddSource(Source source, bool isDefault) { ThreadAssist.AssertInMainThread (); if(source == null || ContainsSource (source)) { return; } GroupSource group_source = source as GroupSource; if (group_source != null && !group_sources.Contains (group_source)) { group_sources.Add (group_source); return; } AddSource (FindAssociatedGroupSource (source.Order)); int position = FindSourceInsertPosition(source); sources.Insert(position, source); if(isDefault) { default_source = source; } source.Updated += OnSourceUpdated; source.ChildSourceAdded += OnChildSourceAdded; source.ChildSourceRemoved += OnChildSourceRemoved; if (source is MusicLibrarySource) { music_library = source as MusicLibrarySource; } else if (source is VideoLibrarySource) { video_library = source as VideoLibrarySource; } SourceAdded.SafeInvoke (new SourceAddedArgs () { Position = position, Source = source }); IDBusExportable exportable = source as IDBusExportable; if (exportable != null) { ServiceManager.DBusServiceManager.RegisterObject (exportable); } List<Source> children = new List<Source> (source.Children); foreach(Source child_source in children) { AddSource (child_source, false); } if(isDefault && ActiveSource == null) { SetActiveSource(source); } } public void RemoveSource (Source source) { RemoveSource (source, false); } public void RemoveSource (Source source, bool recursivelyDispose) { if(source == null || !ContainsSource (source)) { return; } if(source == default_source) { default_source = null; } source.Updated -= OnSourceUpdated; source.ChildSourceAdded -= OnChildSourceAdded; source.ChildSourceRemoved -= OnChildSourceRemoved; sources.Remove(source); GroupSource associated_groupsource = FindAssociatedGroupSource (source.Order); if (!GroupSourceHasMembers 
(associated_groupsource)) { RemoveSource (associated_groupsource, recursivelyDispose); } foreach(Source child_source in source.Children) { RemoveSource (child_source, recursivelyDispose); } IDBusExportable exportable = source as IDBusExportable; if (exportable != null) { ServiceManager.DBusServiceManager.UnregisterObject (exportable); } if (recursivelyDispose) { IDisposable disposable = source as IDisposable; if (disposable != null) { disposable.Dispose (); } } ThreadAssist.ProxyToMain (delegate { if(source == active_source) { if (source.Parent != null && source.Parent.CanActivate) { SetActiveSource(source.Parent); } else { SetActiveSource(default_source); } } SourceEventHandler handler = SourceRemoved; if(handler != null) { SourceEventArgs args = new SourceEventArgs(); args.Source = source; handler(args); } }); } public void RemoveSource(Type type) { Queue<Source> remove_queue = new Queue<Source>(); foreach(Source source in Sources) { if(source.GetType() == type) { remove_queue.Enqueue(source); } } while(remove_queue.Count > 0) { RemoveSource(remove_queue.Dequeue()); } } public bool ContainsSource(Source source) { return sources.Contains(source); } private void OnSourceUpdated(object o, EventArgs args) { ThreadAssist.ProxyToMain (delegate { SourceEventHandler handler = SourceUpdated; if(handler != null) { SourceEventArgs evargs = new SourceEventArgs(); evargs.Source = o as Source; handler(evargs); } }); } private void OnChildSourceAdded(SourceEventArgs args) { AddSource (args.Source); } private void OnChildSourceRemoved(SourceEventArgs args) { RemoveSource (args.Source); } private GroupSource FindAssociatedGroupSource (int order) { int current_order = -1; GroupSource associated_groupsource = null; foreach (GroupSource source in group_sources){ if (order == source.Order) { return null; } if (order > source.Order && current_order < source.Order) { associated_groupsource = source; current_order = source.Order; } } return associated_groupsource; } private bool GroupSourceHasMembers (GroupSource group_source) { Source source = group_source as Source; if (group_source == null || !sources.Contains (source)) { return false; } int source_index = FindSourceInsertPosition (source); if (source_index < sources.Count - 1) { Source next_source = sources[source_index + 1]; GroupSource associated_groupsource = FindAssociatedGroupSource (next_source.Order); return group_source.Equals (associated_groupsource); } else { return false; } } private int FindSourceInsertPosition(Source source) { for(int i = sources.Count - 1; i >= 0; i--) { if((sources[i] as Source).Order == source.Order) { return i; } } for(int i = 0; i < sources.Count; i++) { if((sources[i] as Source).Order >= source.Order) { return i; } } return sources.Count; } public Source DefaultSource { get { return default_source; } set { default_source = value; } } public MusicLibrarySource MusicLibrary { get { return music_library; } } public VideoLibrarySource VideoLibrary { get { return video_library; } } public Source ActiveSource { get { return active_source; } } /*ISource ISourceManager.DefaultSource { get { return DefaultSource; } } ISource ISourceManager.ActiveSource { get { return ActiveSource; } set { value.Activate (); } }*/ public void SetActiveSource(Source source) { SetActiveSource(source, true); } public void SetActiveSource(Source source, bool notify) { ThreadAssist.AssertInMainThread (); if(source == null || !source.CanActivate || active_source == source) { return; } if(active_source != null) { active_source.Deactivate(); } 
active_source = source; if (source.Parent != null) { source.Parent.Expanded = true; } if(!notify) { source.Activate(); return; } SourceEventHandler handler = ActiveSourceChanged; if(handler != null) { SourceEventArgs args = new SourceEventArgs(); args.Source = active_source; handler(args); } source.Activate(); } public IEnumerable<T> FindSources<T> () where T : Source { foreach (Source source in Sources) { T t_source = source as T; if (t_source != null) { yield return t_source; } } } public ICollection<Source> Sources { get { return sources; } } /*string [] ISourceManager.Sources { get { return DBusServiceManager.MakeObjectPathArray<Source>(sources); } }*/ IDBusExportable IDBusExportable.Parent { get { return null; } } string Banshee.ServiceStack.IService.ServiceName { get { return "SourceManager"; } } } }
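// --- Illustrative sketch (not part of the file above) ---------------------------------
// FindAssociatedGroupSource above picks, for a given source Order, the group source with the
// largest Order that is still strictly below it, and returns null when the Order matches a
// group exactly (the source is that group). The stand-in GroupInfo type below is hypothetical,
// not Banshee API; it exists only to show that ordering rule in isolation.
using System;
using System.Collections.Generic;

class GroupInfo
{
    public string Name;
    public int Order;
    public GroupInfo(string name, int order) { Name = name; Order = order; }
}

static class GroupLookupSketch
{
    static GroupInfo FindAssociatedGroup(List<GroupInfo> groups, int sourceOrder)
    {
        int currentOrder = -1;
        GroupInfo associated = null;
        foreach (var group in groups)
        {
            if (sourceOrder == group.Order)
            {
                // The order belongs to a group itself: no association.
                return null;
            }
            if (sourceOrder > group.Order && currentOrder < group.Order)
            {
                // Best (closest-below) group seen so far.
                associated = group;
                currentOrder = group.Order;
            }
        }
        return associated;
    }

    static void Main()
    {
        // Orders taken from Initialize() above: "Libraries" = 39, "Online Media" = 60.
        var groups = new List<GroupInfo>
        {
            new GroupInfo("Libraries", 39),
            new GroupInfo("Online Media", 60),
        };
        Console.WriteLine(FindAssociatedGroup(groups, 45)?.Name); // Libraries
        Console.WriteLine(FindAssociatedGroup(groups, 70)?.Name); // Online Media
        Console.WriteLine(FindAssociatedGroup(groups, 60) == null); // True: 60 is a group's own order
    }
}
// ---------------------------------------------------------------------------------------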
/* * Copyright 2005 OpenXRI Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace DotNetXri.Client.Xml { using org.openxri.xml.Service; using org.openxri.xml.XRD; using junit.framework.Test; using junit.framework.TestCase; using junit.framework.TestSuite; using junit.textui.TestRunner; /* ******************************************************************************** * Class: ParseTest ******************************************************************************** */ /** * @author =chetan * * To change the template for this generated type comment go to * Window>Preferences>Java>Code Generation>Code and Comments */ public class ParseTest :TestCase { /* **************************************************************************** * main() **************************************************************************** */ /** * */ public static void main(String[] args) { // Pass control to the non-graphical test runner TestRunner.run(suite()); } // main() /* **************************************************************************** * suite() **************************************************************************** */ /** * */ public static Test suite() { return new TestSuite(typeof(ParseTest)); } // suite() // suite() /* **************************************************************************** * testSerialization() **************************************************************************** */ /** * */ public void testSerialization() { //J- String sVal = "<XRD\n" + " xml:id=\"1f81b6e0-b64b-1026-f1bc-c0a80b9d3f5b\">\n" + " <Query>.foo</Query>\n" + " <Status code=\"0\"/>\n" + " <ProviderID>\n" + " urn:uuid:D5CFA9CB-F43B-228C-3CEE-C5E9F2D3CB15\n" + " </ProviderID>\n" + " <Service>\n" + " <ProviderID>\n" + " urn:uuid:C5C9EECF-A3BC-4883-8852-8EECB54CE1D5\n" + " </ProviderID>\n" + " <Type>xri://$res*auth*($v*2.0)</Type>\n" + " <MediaType>application/xrds+xml;trust=none</MediaType>\n" + " <MediaType>application/xrds+xml;trust=saml</MediaType>\n" + " <MediaType>application/xrds+xml;trust=https</MediaType>\n" + " <MediaType>application/xrds+xml;trust=saml+https</MediaType>\n" + " <URI>http://test.epok.net/foo/bar</URI>\n" + " <URI>https://test.epok.net/foo/bar</URI>\n" + " <URI>http://test.epok.net/foo/bar</URI>\n" + " <ds:KeyInfo\n" + " xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\">\n" + " </ds:KeyInfo>\n" + " </Service>\n" + " <Service>\n" + " <Type>xri:@epok/foo</Type>\n" + " <MediaType>application/vnd.epok.foo</MediaType>\n" + " </Service>\n" + " <Service>\n" + " <Type>xri:@epok/foo2</Type>\n" + " <MediaType>application/vnd.epok.foo2</MediaType>\n" + " <URI>http://foo2.epok.net/foo/bar</URI>\n" + " <Custom>Custom Data</Custom>\n" + " <Custom2>Custom Data2</Custom2>\n" + " </Service>\n" + " <LocalID>xri://@!1!3/!5/*internal</LocalID>" + " <Ref>xri://@!1!3/!5/*external</Ref>" + "</XRD>\n"; //J+ try { XRD oDesc = XRD.parseXRD(sVal, false); // Resolved assertTrue( "Resolved component failed. 
Expected .foo, got " + oDesc.getQuery(), oDesc.getQuery().Equals(".foo")); // AuthorityID assertEquals( "urn:uuid:D5CFA9CB-F43B-228C-3CEE-C5E9F2D3CB15", oDesc.getProviderID().trim()); Service authService = oDesc.getServiceAt(0); // Authority assertNotNull(authService); // Authority/AuthorityID assertEquals( "urn:uuid:C5C9EECF-A3BC-4883-8852-8EECB54CE1D5", authService.getProviderId().trim()); // Authority/URI(s) assertEquals(3, authService.getNumURIs()); assertEquals( "http://test.epok.net/foo/bar", authService.getURIForScheme("http").getURI().toString()); assertEquals( "https://test.epok.net/foo/bar", authService.getURIForScheme("https").getURI().toString()); // Authority/URI(s) /* assertEquals(3, oDesc.getServiceForType(Tags.SERVICE_AUTH_TRUSTED).getNumURIs()); assertEquals( "http://test.epok.net/foo/bar", oDesc.getXRIAuthority().getTrustedURIForScheme("http") + ""); */ // Authority/KeyInfo assertNotNull(authService.getKeyInfo()); // Services Service oLA = oDesc.getServiceAt(1); assertTrue("Incorrect type", oLA.getTypeAt(0).getType().Equals("xri:@epok/foo")); assertTrue("Incorrect URI for Local Access", oLA.getNumURIs() == 0); oLA = oDesc.getServiceAt(2); assertTrue("Incorrect type", oLA.getTypeAt(0).getType().Equals("xri:@epok/foo2")); assertTrue( "Incorrect URI for Local Access2", oLA.getNumURIs() == 1); assertTrue( "Incorrect Value for Custom Data", oLA.getOtherTagValues("Custom") != null); assertTrue( "Incorrect Value for Custom Data2", oLA.getOtherTagValues("Custom2") != null); // Synonyms assertTrue( "Incorrect # for Internal", oDesc.getNumLocalIDs() == 1); assertTrue( "Incorrect # for External", oDesc.getNumRefs() == 1); Logger.Info(oDesc.toString()); } catch (Exception e) { e.printStackTrace(); assertTrue("Caught unexpected exception " + e.toString(), false); } } // testSerialization() /* **************************************************************************** * testSignedDescriptor() **************************************************************************** */ /** * */ public void testSignedDescriptor() { //J- String sVal = "" + "<XRD\n" + " xmlns=\"\"\n" + " DescriptorID=\"a93853082806b81c173c1434c984fe20\"\n" + " id=\"#foo\">\n" + " <Authority xmlns=\"\">\n" + " <URI xmlns=\"\">http://xri.epok.com</URI>\n" + " </Authority>\n" + " <Assertion\n" + " xmlns=\"urn:oasis:names:tc:SAML:2.0:assertion\"\n" + " xmlns:saml=\"urn:oasis:names:tc:SAML:2.0:assertion\"\n" + " xmlns:samlp=\"urn:oasis:names:tc:SAML:2.0:protocol\"\n" + " ID=\"ff83e3a7a1ac06392653a1b2147cc535\"\n" + " IssueInstant=\"2005-01-27T21:29:10.384Z\"\n" + " MajorVersion=\"2\"\n" + " MinorVersion=\"0\">\n" + " <Issuer>World's Best</Issuer>\n" + " <Subject>\n" + " <NameID>\n" + " .epok\n" + " </NameID>\n" + " </Subject>\n" + " <ds:Signature\n" + " xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\">\n" + " <ds:SignedInfo>\n" + " <ds:CanonicalizationMethod\n" + " Algorithm=\"http://www.w3.org/2001/10/xml-exc-c14n#\"/>\n" + " <ds:SignatureMethod\n" + " Algorithm=\"http://www.w3.org/2000/09/xmldsig#rsa-sha1\"/>\n" + " <ds:Reference\n" + " URI=\"#a93853082806b81c173c1434c984fe20\">\n" + " <ds:Transforms>\n" + " <ds:Transform\n" + " Algorithm=\"http://www.w3.org/2000/09/xmldsig#enveloped-signature\"/>\n" + " <ds:Transform\n" + " Algorithm=\"http://www.w3.org/2001/10/xml-exc-c14n#\">\n" + " <ec:InclusiveNamespaces\n" + " xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\"\n" + " xmlns:ec=\"http://www.w3.org/2001/10/xml-exc-c14n#\"\n" + " PrefixList=\"#default code ds kind rw saml samlp typens\"/>\n" + " </ds:Transform>\n" + " 
</ds:Transforms>\n" + " <ds:DigestMethod\n" + " Algorithm=\"http://www.w3.org/2000/09/xmldsig#sha1\"/>\n" + " <ds:DigestValue>\n" + " p6QbBu6uKTfM6pQ73mgBlyzJOYE=\n" + " </ds:DigestValue>\n" + " </ds:Reference>\n" + " </ds:SignedInfo>\n" + " <ds:SignatureValue>\n" + "l94Yfpu5RIexeBywCH1mPyVlOgotqwdEhqdXsmxyDeLyw2RjkT671rkVv102YEMyLghudpC8GE75\n" + "tvcIc3Nm7q+7vX8ygdPxoyHlSnQda7yblgcW1EtPQWKD5sor5ue9hGHVukvtgXS8WRcIa4AYhnyM\n" + "onchjRVZwx7+AjnHyFs=\n" + " </ds:SignatureValue>\n" + " </ds:Signature>\n" + " <AttributeStatement>\n" + " <Attribute\n" + " Name=\"XRD\">\n" + " <AttributeValue>\n" + " #foo\n" + " </AttributeValue>\n" + " </Attribute>\n" + " </AttributeStatement>\n" + " </Assertion>\n" + "</XRD>\n"; //J+ try { XRD oDesc = XRD.parseXRD(sVal, false); assertNotNull(oDesc.getSAMLAssertion()); Logger.Info(oDesc.toString()); } catch (Exception e) { e.printStackTrace(); assertTrue("Caught unexpected exception " + e.toString(), false); } } // testSignedDescriptor() } // Class: ParseTest }
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for details. using System; using System.Diagnostics; using System.Globalization; using System.Text; using System.Text.RegularExpressions; using mshtml; using OpenLiveWriter.CoreServices; using OpenLiveWriter.CoreServices.HTML; using OpenLiveWriter.HtmlParser.Parser; using OpenLiveWriter.Mshtml; namespace OpenLiveWriter.HtmlEditor.Marshalling.Data_Handlers { public class HtmlHandler : FreeTextHandler { public HtmlHandler(DataObjectMeister dataObject, DataFormatHandlerContext handlerContext, IHtmlMarshallingTarget editorContext) : base(dataObject, handlerContext, editorContext) { } /// <summary> /// Is there URL data in the passed data object? /// </summary> /// <param name="data">data</param> /// <returns>true if there is url data, else false</returns> public static bool CanCreateFrom(DataObjectMeister data) { return data.HTMLData != null; } private static bool IsOfficeHtml(HTMLData data) { string generator = data.HTMLMetaData.Generator; if (String.IsNullOrEmpty(generator)) return false; return (generator.StartsWith("Microsoft Word") || generator.StartsWith("Microsoft Excel") || generator.StartsWith("Microsoft PowerPoint")); } /// <summary> /// Grabs HTML copied in the clipboard and pastes it into the document (pulls in a copy of embedded content too) /// </summary> protected override bool DoInsertData(DataAction action, MarkupPointer begin, MarkupPointer end) { using (new WaitCursor()) { try { string baseUrl = UrlHelper.GetBasePathUrl(DataMeister.HTMLData.SourceURL); string html = DataMeister.HTMLData.HTMLSelection; //Check to see if the selection has an incomplete unordered list var finder = new IncompleteListFinder(html); finder.Parse(); if ((!EditorContext.CleanHtmlOnPaste) || finder.HasIncompleteList) { using (IUndoUnit undoUnit = EditorContext.CreateInvisibleUndoUnit()) { // Create a new MarkupContainer off of EditorContext's document that contains the source HTML // with comments marking the start and end selection. MarkupContainer sourceContainer = EditorContext.MarkupServices.ParseString(DataMeister.HTMLData.HTMLWithMarkers); // MSHTML's ParseString implementation clears all the attributes on the <body> element, so we // have to manually add them back in. CopyBodyAttributes(DataMeister.HTMLData.HTMLWithMarkers, sourceContainer.Document.body); MarkupRange sourceRange = FindMarkedFragment(sourceContainer.Document, HTMLDataObject.START_FRAGMENT_MARKER, HTMLDataObject.END_FRAGMENT_MARKER); MshtmlMarkupServices sourceContainerMarkupServices = new MshtmlMarkupServices((IMarkupServicesRaw)sourceContainer.Document); // Some applications may not add the correct fragment markers (e.g. copying from Fiddler from // the Web Sessions view). We'll just select the entire <body> of the clipboard in this case. if (sourceRange == null) { sourceRange = sourceContainerMarkupServices.CreateMarkupRange(sourceContainer.Document.body, false); } else { // Make sure that we don't try to copy just parts of a table/list. We need to include the // parent table/list. if (!EditorContext.CleanHtmlOnPaste) { ExpandToIncludeTables(sourceRange, sourceContainerMarkupServices); } ExpandToIncludeLists(sourceRange, sourceContainerMarkupServices); } if (sourceRange != null) { if (!EditorContext.CleanHtmlOnPaste) { // WinLive 273280: Alignment on a table acts like a float, which can throw off the layout of the rest of // the document. 
If there is nothing before or after the table, then we can safely remove the alignment. RemoveAlignmentIfSingleTable(sourceRange); // Serialize the source HTML to a string while keeping the source formatting. MarkupRange destinationRange = EditorContext.MarkupServices.CreateMarkupRange(begin.Clone(), end.Clone()); html = KeepSourceFormatting(sourceRange, destinationRange); } else { html = sourceRange.HtmlText; } } undoUnit.Commit(); } Trace.Assert(html != null, "Inline source CSS failed!"); } if (html == null) { html = DataMeister.HTMLData.HTMLSelection; } if (IsPasteFromSharedCanvas(DataMeister)) { if (action == DataAction.Copy) { // WinLive 96840 - Copying and pasting images within shared canvas should persist source // decorator settings. "wlCopySrcUrl" is inserted while copy/pasting within canvas. html = EditorContext.FixImageReferences(ImageCopyFixupHelper.FixupSourceUrlForCopy(html), DataMeister.HTMLData.SourceURL); } } else { html = EditorContext.FixImageReferences(html, DataMeister.HTMLData.SourceURL); HtmlCleanupRule cleanupRule = HtmlCleanupRule.Normal; if (IsOfficeHtml(DataMeister.HTMLData)) cleanupRule = HtmlCleanupRule.PreserveTables; // In Mail, we want to preserve the style of the html that is on the clipboard // Whereas in Writer we by default want to remove formatting so it looks like your blog theme if (EditorContext.CleanHtmlOnPaste) { // optionally cleanup the html html = EditorContext.HtmlGenerationService.CleanupHtml(html, baseUrl, cleanupRule); } else { html = HtmlCleaner.StripNamespacedTagsAndCommentsAndMarkupDirectives(html); } // standard fixups html = EditorContext.HtmlGenerationService.GenerateHtmlFromHtmlFragment(html, baseUrl); } // insert the content if (EditorContext.MarshalHtmlSupported) EditorContext.InsertHtml(begin, end, html, DataMeister.HTMLData.SourceURL); else if (EditorContext.MarshalTextSupported) { // This is called only in the case that we're attempting to marshal HTML, but only // text is supported. In this case, we should down convert to text and provide that. html = HTMLDocumentHelper.HTMLToPlainText(html); EditorContext.InsertHtml(begin, end, html, DataMeister.HTMLData.SourceURL); } else Debug.Assert(false, "Html being inserted when text or html isn't supported."); // Now select what was just inserted EditorContext.MarkupServices.CreateMarkupRange(begin, end).ToTextRange().select(); //place the caret at the end of the inserted content //EditorContext.MoveCaretToMarkupPointer(end, true); return true; } catch (Exception e) { //bugfix 1696, put exceptions into the trace log. Trace.Fail("Exception while inserting HTML: " + e.Message, e.StackTrace); return false; } } } private void CopyBodyAttributes(string sourceHtml, IHTMLElement destinationBody) { Debug.Assert(destinationBody != null, "destinationBody should not be null!"); if (destinationBody == null) { return; } var finder = new BodyTagFinder(sourceHtml); finder.Parse(); if (finder.BodyBeginTag != null) { StringBuilder bodyAttributes = new StringBuilder(); foreach (Attr attr in finder.BodyBeginTag.Attributes) { bodyAttributes.AppendFormat(CultureInfo.InvariantCulture, "{0} ", attr); } IHTMLElement sourceBodyElement = EditorContext.MarkupServices.CreateElement(_ELEMENT_TAG_ID.TAGID_BODY, bodyAttributes.ToString()); HTMLElementHelper.CopyAttributes(sourceBodyElement, destinationBody); } } /// <summary> /// Searches through the provided document for a start and end comment marker and then returns the fragment as /// a MarkupRange. 
/// </summary> /// <param name="document">The document to search.</param> /// <param name="startMarker">The comment text that marks the start of the fragment /// (e.g. &lt;!--StartFragment--&gt; ).</param> /// <param name="endMarker">The comment text that marks the end of the fragment /// (e.g. &lt;!--EndFragment--&gt; ).</param> /// <returns>The fragment as a MarkupRange or null if no valid fragment was found.</returns> private MarkupRange FindMarkedFragment(IHTMLDocument2 document, string startMarker, string endMarker) { MarkupPointer startFragment = null; MarkupPointer endFragment = null; MshtmlMarkupServices markupServices = new MshtmlMarkupServices((IMarkupServicesRaw)document); // Look for the markers in the document. foreach (IHTMLElement element in document.all) { if (element is IHTMLCommentElement && ((IHTMLCommentElement)element).text == startMarker) { startFragment = markupServices.CreateMarkupPointer(element, _ELEMENT_ADJACENCY.ELEM_ADJ_AfterEnd); } else if (element is IHTMLCommentElement && ((IHTMLCommentElement)element).text == endMarker) { endFragment = markupServices.CreateMarkupPointer(element, _ELEMENT_ADJACENCY.ELEM_ADJ_BeforeBegin); } } if (startFragment == null || endFragment == null || !startFragment.Positioned || !endFragment.Positioned || startFragment.IsRightOf(endFragment)) { Trace.WriteLine("Unable to find fragment or invalid fragment!"); return null; } // WinLive 251786: IE (and most other browsers) allow HTML like the following: // <p>This is a paragraph[cursor] // <p>This is a paragraph // However, when we use MarkupPointers to walk through this HTML, IE pretends there is a </p> at the end // of each of the above lines. This can cause issues when we copy part of this HTML somewhere else (e.g // everything after the [cursor]) and attempt to walk through both copies (e.g. during paste with keep // source formatting) at the same time. This holds true for some other elements, such as <li>s and <td>s. MarkupContext startContext = startFragment.Right(false); if (startFragment.IsLeftOf(endFragment) && startContext.Context == _MARKUP_CONTEXT_TYPE.CONTEXT_TYPE_ExitScope && startContext.Element != null && ElementFilters.IsEndTagOptional(startContext.Element) && !Regex.IsMatch(startContext.Element.outerHTML, String.Format(CultureInfo.InvariantCulture, @"</{0}(\s[^>]*)?>\s*$", startContext.Element.tagName), RegexOptions.IgnoreCase | RegexOptions.CultureInvariant)) { startFragment.Right(true); } return markupServices.CreateMarkupRange(startFragment, endFragment); } /// <summary> /// Takes the source HTML and makes necessary modifications to keep the source formatting as if it were to be /// pasted into the destination range. /// </summary> /// <param name="sourceRange">The range containing the HTML that is being copied.</param> /// <param name="destinationRange">The range that the source HTML will be copied to.</param> /// <returns>A serialized string of the source HTML with necessary modifications to keep the source formatting /// or null if unsuccessful.</returns> private string KeepSourceFormatting(MarkupRange sourceRange, MarkupRange destinationRange) { Debug.Assert(sourceRange.Start.Container.GetOwningDoc() == destinationRange.Start.Container.GetOwningDoc(), "Ranges must share an owning document!"); // We will temporarily add comments to the destination document to mark the destinationRange. IHTMLElement startComment = null; IHTMLElement endComment = null; try { // This is our true destination document. 
IHTMLDocument2 destinationDocument = destinationRange.Start.Container.Document; MshtmlMarkupServices destinationMarkupServices = new MshtmlMarkupServices((IMarkupServicesRaw)destinationDocument); // However, we'll use a temp destination because we don't want to paste anything into the real // document yet as it could fail, it would fire events, images would start loading, etc. MarkupContainer temporaryDestinationContainer = destinationMarkupServices.CreateMarkupContainer(); MarkupPointer temporaryDestinationPointer = destinationMarkupServices.CreateMarkupPointer(); temporaryDestinationPointer.MoveToContainer(temporaryDestinationContainer, true); // We add in comments to the destination document so that when we copy this range over to the fake // destination we'll be able to find the range again. destinationRange.Start.Gravity = _POINTER_GRAVITY.POINTER_GRAVITY_Left; destinationRange.End.Gravity = _POINTER_GRAVITY.POINTER_GRAVITY_Right; string startMarker = string.Format(CultureInfo.InvariantCulture, "<!--{0}-->", Guid.NewGuid()); destinationMarkupServices.InsertHtml(startMarker, destinationRange.Start); startComment = destinationRange.Start.Right(false).Element; string endMarker = string.Format(CultureInfo.InvariantCulture, "<!--{0}-->", Guid.NewGuid()); destinationMarkupServices.InsertHtml(endMarker, destinationRange.End); endComment = destinationRange.End.Left(false).Element; try { // Copy over the entire destination document into the fake destination document. MarkupRange destinationAll = SelectAll(destinationDocument); destinationMarkupServices.Copy(destinationAll.Start, destinationAll.End, temporaryDestinationPointer); // Find the original destination range in this copy. MarkupRange temporaryDestinationRange = FindMarkedFragment(temporaryDestinationContainer.Document, startMarker, endMarker); if (temporaryDestinationRange != null) { // Do the work to keep the source formatting. MarkupRange inlinedRange = new KeepSourceFormatting(sourceRange, temporaryDestinationRange).Execute(); if (inlinedRange != null) { return inlinedRange.HtmlText; } } } finally { // WinLive 249077: Clear the temporary destination container, otherwise behaviors may // inadvertently attach to elements in the MarkupContainer. temporaryDestinationContainer.Document.body.innerHTML = String.Empty; } } catch (Exception e) { // I really dont want some funky html on the clipboard that causes a problem with this code // to prevent a paste from going through. Trace.Fail("Failed to get inline css for selection: " + e); } finally { Debug.Assert(startComment is IHTMLCommentElement, "Didn't find start comment or it wasn't created."); if (startComment is IHTMLCommentElement) { HTMLElementHelper.RemoveElement(startComment); } Debug.Assert(endComment is IHTMLCommentElement, "Didn't find end comment or it wasn't created."); if (endComment is IHTMLCommentElement) { HTMLElementHelper.RemoveElement(endComment); } } return null; } /// <summary> /// Creates a MarkupRange that contains the entire provided document. /// </summary> /// <param name="document">The document to select.</param> /// <returns>A MarkupRange that contains the entire document.</returns> private MarkupRange SelectAll(IHTMLDocument2 document) { MshtmlMarkupServices markupServices = new MshtmlMarkupServices((IMarkupServicesRaw)document); MarkupRange entireDocument = markupServices.CreateMarkupRange(((IHTMLDocument3)document).documentElement, true); // Make sure the doctype and anything else outside the root element is selected too. 
MarkupContext context = entireDocument.Start.Left(true); while (context.Context != _MARKUP_CONTEXT_TYPE.CONTEXT_TYPE_None) { context = entireDocument.Start.Left(true); } context = entireDocument.End.Right(true); while (context.Context != _MARKUP_CONTEXT_TYPE.CONTEXT_TYPE_None) { context = entireDocument.End.Right(true); } return entireDocument; } /// <summary> /// Makes sure that whole (not parts of) tables are included in the source of a paste. /// </summary> /// <param name="range">The original source range. The range may be modified.</param> /// <param name="markupServices">MarkupServices for the range.</param> private void ExpandToIncludeTables(MarkupRange range, MshtmlMarkupServices markupServices) { MarkupPointer pointer = markupServices.CreateMarkupPointer(); IHTMLElement[] tableElements = range.GetElements(ElementFilters.TABLE_ELEMENTS, false); foreach (IHTMLElement element in tableElements) { IHTMLElement parentTable = element; while (parentTable != null && markupServices.GetElementTagId(parentTable) != _ELEMENT_TAG_ID.TAGID_TABLE) { parentTable = parentTable.parentElement; } if (parentTable != null) { pointer.MoveAdjacentToElement(parentTable, _ELEMENT_ADJACENCY.ELEM_ADJ_BeforeBegin); if (range.Start.IsRightOf(pointer)) { range.Start.MoveToPointer(pointer); } pointer.MoveAdjacentToElement(parentTable, _ELEMENT_ADJACENCY.ELEM_ADJ_AfterEnd); if (range.End.IsLeftOf(pointer)) { range.End.MoveToPointer(pointer); } } } } /// <summary> /// Makes sure that whole (not parts of) lists are included in the source of a paste. /// </summary> /// <param name="range">The original source range. The range may be modified.</param> /// <param name="markupServices">MarkupServices for the range.</param> private void ExpandToIncludeLists(MarkupRange range, MshtmlMarkupServices markupServices) { MarkupPointer pointer = markupServices.CreateMarkupPointer(); IHTMLElementFilter listFilter = ElementFilters.CreateCompoundElementFilter(ElementFilters.LIST_ELEMENTS, ElementFilters.LIST_ITEM_ELEMENTS); IHTMLElement[] listElements = range.GetElements(listFilter, false); foreach (IHTMLElement element in listElements) { IHTMLElement parentList = element; while (parentList != null && !ElementFilters.IsListElement(parentList)) { parentList = parentList.parentElement; } if (parentList != null) { pointer.MoveAdjacentToElement(parentList, _ELEMENT_ADJACENCY.ELEM_ADJ_BeforeBegin); if (range.Start.IsRightOf(pointer)) { range.Start.MoveToPointer(pointer); } pointer.MoveAdjacentToElement(parentList, _ELEMENT_ADJACENCY.ELEM_ADJ_AfterEnd); if (range.End.IsLeftOf(pointer)) { range.End.MoveToPointer(pointer); } } } } /// <summary> /// If this element is a table, and there's nothing else before or after it and it's aligned left or right /// then removes the alignment. /// </summary> /// <param name="range">The original source range.</param> private void RemoveAlignmentIfSingleTable(MarkupRange range) { // WinLive 273280: Alignment on a table acts like a float, which can throw off the layout of the rest of // the document. If there is nothing before or after the table, then we can safely remove the alignment. 
IHTMLElement[] topLevelElements = range.GetTopLevelElements(e => !(e is IHTMLCommentElement)); if (topLevelElements.Length == 1 && topLevelElements[0] is IHTMLTable && (String.Compare(topLevelElements[0].getAttribute("align", 2) as string, "left", StringComparison.OrdinalIgnoreCase) == 0 || String.Compare(topLevelElements[0].getAttribute("align", 2) as string, "right", StringComparison.OrdinalIgnoreCase) == 0)) { topLevelElements[0].removeAttribute("align", 0); } } public static bool IsPasteFromSharedCanvas(DataObjectMeister dataMeister) { return (dataMeister.HTMLData != null) && IsSharedCanvasTempUrl(dataMeister.HTMLData.SourceURL); } public static bool IsSharedCanvasTempUrl(string url) { try { return UrlHelper.IsFileUrl(url) && TempFileManager.Instance.IsPathContained(new Uri(url).LocalPath); } catch { // maybe URL was not valid or something... who knows return false; } } } }
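// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the original sources above): the
// FindMarkedFragment method walks MSHTML comment elements to recover the range
// between a start and end marker. The stand-alone helper below shows the same
// idea against a raw HTML string using plain index searches; the names
// MarkedFragmentSample and TryExtractFragment are hypothetical and exist only
// for this example.
// ---------------------------------------------------------------------------
using System;

internal static class MarkedFragmentSample
{
    // Returns the HTML between the first start marker and the last end marker,
    // or null when either marker is missing or they appear out of order
    // (mirroring the "invalid fragment" guard in FindMarkedFragment).
    internal static string TryExtractFragment(string html, string startMarker, string endMarker)
    {
        if (string.IsNullOrEmpty(html) || string.IsNullOrEmpty(startMarker) || string.IsNullOrEmpty(endMarker))
        {
            return null;
        }

        int start = html.IndexOf(startMarker, StringComparison.OrdinalIgnoreCase);
        int end = html.LastIndexOf(endMarker, StringComparison.OrdinalIgnoreCase);
        if (start < 0 || end < 0)
        {
            return null;
        }

        start += startMarker.Length;
        return start <= end ? html.Substring(start, end - start) : null;
    }
}

// Example: TryExtractFragment(clipboardHtml, "<!--StartFragment-->", "<!--EndFragment-->")
// returns only the marked fragment of a CF_HTML clipboard payload.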
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Linq; using System.Linq.Expressions; using Xunit; namespace ComparedQueryable.Test.NativeQueryableTests { public class MaxTests : EnumerableBasedTests { [Fact] public void NullInt32Source() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<int>)null).Max()); } [Fact] public void EmptyInt32() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<int>().AsNaturalQueryable().Max()); } [Fact] public void Int32MaxRepeated() { int[] source = { -6, 0, -9, 0, -10, 0 }; Assert.Equal(0, source.AsNaturalQueryable().Max()); } [Fact] public void NullInt64Source() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<long>)null).Max()); } [Fact] public void EmptyInt64() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<long>().AsNaturalQueryable().Max()); } [Fact] public void Int64MaxRepeated() { long[] source = { 6, 50, 9, 50, 10, 50 }; Assert.Equal(50, source.AsNaturalQueryable().Max()); } [Fact] public void EmptySingle() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<float>().AsNaturalQueryable().Max()); } [Fact] public void Single_MaxRepeated() { float[] source = { -5.5f, float.PositiveInfinity, 9.9f, float.PositiveInfinity }; Assert.True(float.IsPositiveInfinity(source.AsNaturalQueryable().Max())); } [Fact] public void NullSingleSource() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<float>)null).Max()); } [Fact] public void NullDoubleSource() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<double>)null).Max()); } [Fact] public void EmptyDouble() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<double>().AsNaturalQueryable().Max()); } [Fact] public void DoubleMaximumRepeated() { double[] source = { -5.5, double.PositiveInfinity, 9.9, double.PositiveInfinity }; Assert.True(double.IsPositiveInfinity(source.AsNaturalQueryable().Max())); } [Fact] public void NullDecimalSource() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal>)null).Max()); } [Fact] public void EmptyDecimal() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<decimal>().AsNaturalQueryable().Max()); } [Fact] public void DecimalMaximumRepeated() { decimal[] source = { -5.5m, 0m, 9.9m, -5.5m, 9.9m }; Assert.Equal(9.9m, source.AsNaturalQueryable().Max()); } [Fact] public void NullNullableInt32Source() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<int?>)null).Max()); } [Fact] public void EmptyNullableInt32() { Assert.Null(Enumerable.Empty<int?>().AsNaturalQueryable().Max()); } [Fact] public void NullableInt32MaxRepeated() { int?[] source = { 6, null, null, 100, 9, 100, 10, 100 }; Assert.Equal(100, source.AsNaturalQueryable().Max()); } [Fact] public void NullNullableInt64Source() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<long?>)null).Max()); } [Fact] public void EmptyNullableInt64() { Assert.Null(Enumerable.Empty<long?>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullableInt64MaximumRepeated() { long?[] source = { -6, null, null, 0, -9, 0, -10, -30 }; Assert.Equal(0, source.AsNaturalQueryable().Max()); } [Fact] public void EmptyNullableSingle() { 
Assert.Null(Enumerable.Empty<float?>().AsNaturalQueryable().Max()); } [Fact] public void NullableSingleMaxRepeated() { float?[] source = { -6.4f, null, null, -0.5f, -9.4f, -0.5f, -10.9f, -0.5f }; Assert.Equal(-0.5f, source.AsNaturalQueryable().Max()); } [Fact] public void NullNullableDoubleSource() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<double?>)null).Max()); } [Fact] public void EmptyNullableDouble() { Assert.Null(Enumerable.Empty<double?>().AsNaturalQueryable().Max()); } [Fact] public void NullNullableDecimalSource() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal?>)null).Max()); } [Fact] public void EmptyNullableDecimal() { Assert.Null(Enumerable.Empty<decimal?>().AsNaturalQueryable().Max()); } [Fact] public void NullableDecimalMaximumRepeated() { decimal?[] source = { 6.4m, null, null, decimal.MaxValue, 9.4m, decimal.MaxValue, 10.9m, decimal.MaxValue }; Assert.Equal(decimal.MaxValue, source.AsNaturalQueryable().Max()); } [Fact] public void EmptyDateTime() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<DateTime>().AsNaturalQueryable().Max()); } [Fact] public void NullDateTimeSource() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<DateTime>)null).Max()); } [Fact] public void NullStringSource() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<string>)null).Max()); } [Fact] public void EmptyString() { Assert.Null(Enumerable.Empty<string>().AsNaturalQueryable().Max()); } [Fact] public void StringMaximumRepeated() { string[] source = { "ooo", "ccc", "ccc", "ooo", "ooo", "nnn" }; Assert.Equal("ooo", source.AsNaturalQueryable().Max()); } [Fact] public void EmptyInt32WithSelector() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<int>().Max(x => x)); } [Fact] public void NullInt32SourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<int>)null).Max(i => i)); } [Fact] public void Int32SourceWithNullSelector() { Expression<Func<int, int>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxInt32WithSelectorAccessingProperty() { var source = new[]{ new { name="Tim", num=10 }, new { name="John", num=-105 }, new { name="Bob", num=30 } }; Assert.Equal(30, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void EmptyInt64WithSelector() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<long>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullInt64SourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<long>)null).Max(i => i)); } [Fact] public void Int64SourceWithNullSelector() { Expression<Func<long, long>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxInt64WithSelectorAccessingProperty() { var source = new[]{ new { name="Tim", num=10L }, new { name="John", num=-105L }, new { name="Bob", num=long.MaxValue } }; Assert.Equal(long.MaxValue, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void MaxSingleWithSelectorAccessingProperty() { var source = new [] { new { name = "Tim", num = 40.5f }, new { name = "John", num = -10.25f }, new { name = "Bob", num = 100.45f } }; Assert.Equal(100.45f, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void 
NullSingleSourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<float>)null).Max(i => i)); } [Fact] public void SingleSourceWithNullSelector() { Expression<Func<float, float>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float>().AsNaturalQueryable().Max(selector)); } [Fact] public void EmptySingleWithSelector() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<float>().AsNaturalQueryable().Max(x => x)); } [Fact] public void EmptyDoubleWithSelector() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<double>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullDoubleSourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<double>)null).Max(i => i)); } [Fact] public void DoubleSourceWithNullSelector() { Expression<Func<double, double>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxDoubleWithSelectorAccessingField() { var source = new[]{ new { name="Tim", num=40.5 }, new { name="John", num=-10.25 }, new { name="Bob", num=100.45 } }; Assert.Equal(100.45, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void EmptyDecimalWithSelector() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<decimal>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullDecimalSourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal>)null).Max(i => i)); } [Fact] public void DecimalSourceWithNullSelector() { Expression<Func<decimal, decimal>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxDecimalWithSelectorAccessingProperty() { var source = new[]{ new { name="Tim", num=420.5m }, new { name="John", num=900.25m }, new { name="Bob", num=10.45m } }; Assert.Equal(900.25m, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void EmptyNullableInt32WithSelector() { Assert.Null(Enumerable.Empty<int?>().Max(x => x)); } [Fact] public void NullNullableInt32SourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<int?>)null).Max(i => i)); } [Fact] public void NullableInt32SourceWithNullSelector() { Expression<Func<int?, int?>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int?>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxNullableInt32WithSelectorAccessingField() { var source = new[]{ new { name="Tim", num=(int?)10 }, new { name="John", num=(int?)-105 }, new { name="Bob", num=(int?)null } }; Assert.Equal(10, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void EmptyNullableInt64WithSelector() { Assert.Null(Enumerable.Empty<long?>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullNullableInt64SourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<long?>)null).Max(i => i)); } [Fact] public void NullableInt64SourceWithNullSelector() { Expression<Func<long?, long?>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long?>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxNullableInt64WithSelectorAccessingField() { var source = new[]{ new {name="Tim", num=default(long?) 
}, new {name="John", num=(long?)-105L }, new {name="Bob", num=(long?)long.MaxValue } }; Assert.Equal(long.MaxValue, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void EmptyNullableSingleWithSelector() { Assert.Null(Enumerable.Empty<float?>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullNullableSingleSourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<float?>)null).Max(i => i)); } [Fact] public void NullableSingleSourceWithNullSelector() { Expression<Func<float?, float?>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float?>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxNullableSingleWithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=(float?)40.5f }, new { name="John", num=(float?)null }, new { name="Bob", num=(float?)100.45f } }; Assert.Equal(100.45f, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void EmptyNullableDoubleWithSelector() { Assert.Null(Enumerable.Empty<double?>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullNullableDoubleSourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<double?>)null).Max(i => i)); } [Fact] public void NullableDoubleSourceWithNullSelector() { Expression<Func<double?, double?>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double?>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxNullableDoubleWithSelectorAccessingProperty() { var source = new []{ new { name = "Tim", num = (double?)40.5}, new { name = "John", num = default(double?)}, new { name = "Bob", num = (double?)100.45} }; Assert.Equal(100.45, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void EmptyNullableDecimalWithSelector() { Assert.Null(Enumerable.Empty<decimal?>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullNullableDecimalSourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal?>)null).Max(i => i)); } [Fact] public void NullableDecimalSourceWithNullSelector() { Expression<Func<decimal?, decimal?>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal?>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxNullableDecimalWithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=(decimal?)420.5m }, new { name="John", num=default(decimal?) 
}, new { name="Bob", num=(decimal?)10.45m } }; Assert.Equal(420.5m, source.AsNaturalQueryable().Max(e => e.num)); } [Fact] public void EmptyNullableDateTimeWithSelector() { Assert.Null(Enumerable.Empty<DateTime?>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullNullableDateTimeSourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<DateTime?>)null).Max(i => i)); } [Fact] public void NullableDateTimeSourceWithNullSelector() { Expression<Func<DateTime?, DateTime?>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<DateTime?>().AsNaturalQueryable().Max(selector)); } [Fact] public void EmptyStringSourceWithSelector() { Assert.Null(Enumerable.Empty<string>().AsNaturalQueryable().Max(x => x)); } [Fact] public void NullStringSourceWithSelector() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IQueryable<string>)null).Max(i => i)); } [Fact] public void StringSourceWithNullSelector() { Expression<Func<string, string>> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<string>().AsNaturalQueryable().Max(selector)); } [Fact] public void MaxStringWithSelectorAccessingProperty() { var source = new[]{ new { name="Tim", num=420.5m }, new { name="John", num=900.25m }, new { name="Bob", num=10.45m } }; Assert.Equal("Tim", source.AsNaturalQueryable().Max(e => e.name)); } [Fact] public void EmptyBoolean() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<bool>().AsNaturalQueryable().Max()); } [Fact] public void Max1() { var val = (new int[] { 0, 2, 1 }).AsNaturalQueryable().Max(); Assert.Equal(2, val); } [Fact] public void Max2() { var val = (new int[] { 0, 2, 1 }).AsNaturalQueryable().Max(n => n); Assert.Equal(2, val); } } }
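// ---------------------------------------------------------------------------
// Editor's note with an illustrative sketch (not part of the test suite above):
// the edge cases these tests exercise come directly from Queryable.Max. The
// snippet below demonstrates the two behaviours the tests lean on most, using
// the standard AsQueryable() in place of the library-specific
// AsNaturalQueryable(); the class name QueryableMaxSemanticsSample is
// hypothetical.
// ---------------------------------------------------------------------------
using System;
using System.Linq;

internal static class QueryableMaxSemanticsSample
{
    internal static void Demonstrate()
    {
        // 1. Max over an empty sequence of a non-nullable type throws.
        try
        {
            Enumerable.Empty<int>().AsQueryable().Max();
        }
        catch (InvalidOperationException)
        {
            Console.WriteLine("Max over an empty IQueryable<int> throws.");
        }

        // 2. Max over an empty sequence of a nullable type returns null,
        //    and nulls inside the sequence are ignored.
        int? emptyMax = Enumerable.Empty<int?>().AsQueryable().Max();
        Console.WriteLine(emptyMax == null);            // True

        int?[] source = { 6, null, 100, null, 10 };
        Console.WriteLine(source.AsQueryable().Max());  // 100
    }
}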
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.Completion; using Microsoft.CodeAnalysis.CSharp.Extensions; using Microsoft.CodeAnalysis.CSharp.Symbols; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Options; using Microsoft.CodeAnalysis.Shared.Extensions; using Microsoft.CodeAnalysis.Text; using Roslyn.Utilities; using System.Collections.Immutable; using Microsoft.CodeAnalysis.Completion.Providers; namespace Microsoft.CodeAnalysis.CSharp.Completion.Providers { internal partial class CrefCompletionProvider : CommonCompletionProvider { public static readonly SymbolDisplayFormat QualifiedCrefFormat = new SymbolDisplayFormat( globalNamespaceStyle: SymbolDisplayGlobalNamespaceStyle.Omitted, typeQualificationStyle: SymbolDisplayTypeQualificationStyle.NameOnly, propertyStyle: SymbolDisplayPropertyStyle.NameOnly, genericsOptions: SymbolDisplayGenericsOptions.IncludeTypeParameters, parameterOptions: SymbolDisplayParameterOptions.None, miscellaneousOptions: SymbolDisplayMiscellaneousOptions.EscapeKeywordIdentifiers); public static readonly SymbolDisplayFormat CrefFormat = new SymbolDisplayFormat( globalNamespaceStyle: SymbolDisplayGlobalNamespaceStyle.Omitted, typeQualificationStyle: SymbolDisplayTypeQualificationStyle.NameOnly, propertyStyle: SymbolDisplayPropertyStyle.NameOnly, genericsOptions: SymbolDisplayGenericsOptions.IncludeTypeParameters, parameterOptions: SymbolDisplayParameterOptions.None, miscellaneousOptions: SymbolDisplayMiscellaneousOptions.EscapeKeywordIdentifiers | SymbolDisplayMiscellaneousOptions.UseSpecialTypes); internal override bool IsInsertionTrigger(SourceText text, int characterPosition, OptionSet options) { return CompletionUtilities.IsTriggerCharacter(text, characterPosition, options); } public override async Task ProvideCompletionsAsync(CompletionContext context) { var document = context.Document; var position = context.Position; var options = context.Options; var cancellationToken = context.CancellationToken; var tree = await document.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false); if (!tree.IsEntirelyWithinCrefSyntax(position, cancellationToken)) { return; } var token = tree.FindTokenOnLeftOfPosition(position, cancellationToken, includeDocumentationComments: true) .GetPreviousTokenIfTouchingWord(position); // To get a Speculative SemanticModel (which is much faster), we need to // walk up to the node the DocumentationTrivia is attached to. 
var parentNode = token.Parent.FirstAncestorOrSelf<DocumentationCommentTriviaSyntax>()?.ParentTrivia.Token.Parent; if (parentNode == null) { return; } var semanticModel = await document.GetSemanticModelForNodeAsync(parentNode, cancellationToken).ConfigureAwait(false); var symbols = GetSymbols(token, semanticModel, cancellationToken); symbols = symbols.FilterToVisibleAndBrowsableSymbols(options.GetOption(CompletionOptions.HideAdvancedMembers, semanticModel.Language), semanticModel.Compilation); if (!symbols.Any()) { return; } context.IsExclusive = true; var text = await document.GetTextAsync(cancellationToken).ConfigureAwait(false); var span = GetCompletionItemSpan(text, position); var items = CreateCompletionItems(document.Project.Solution.Workspace, semanticModel, symbols, token, span); context.AddItems(items); } private static bool IsCrefStartContext(SyntaxToken token) { // cases: // <see cref="| // <see cref='| return token.IsKind(SyntaxKind.DoubleQuoteToken, SyntaxKind.SingleQuoteToken) && token.Parent.IsKind(SyntaxKind.XmlCrefAttribute); } private static bool IsCrefParameterListContext(SyntaxToken token) { // cases: // <see cref="M(| // <see cref="M(x, | // <see cref="M(x, ref | // <see cref="M(x, out | // <see cref="M[| // <see cref="M[x, | // <see cref="M[x, ref | // <see cref="M[x, out | if (!token.Parent.IsKind(SyntaxKind.CrefParameterList, SyntaxKind.CrefBracketedParameterList)) { return false; } if (token.IsKind(SyntaxKind.OpenParenToken) && token.Parent.IsKind(SyntaxKind.CrefParameterList)) { return true; } if (token.IsKind(SyntaxKind.OpenBracketToken) && token.Parent.IsKind(SyntaxKind.CrefBracketedParameterList)) { return true; } return token.IsKind(SyntaxKind.CommaToken, SyntaxKind.RefKeyword, SyntaxKind.OutKeyword); } private static bool IsCrefQualifiedNameContext(SyntaxToken token) { // cases: // <see cref="x.| return token.IsKind(SyntaxKind.DotToken) && token.Parent.IsKind(SyntaxKind.QualifiedCref); } private static IEnumerable<ISymbol> GetSymbols(SyntaxToken token, SemanticModel semanticModel, CancellationToken cancellationToken) { if (IsCrefStartContext(token)) { return GetUnqualifiedSymbols(token, semanticModel, cancellationToken); } else if (IsCrefParameterListContext(token)) { return semanticModel.LookupNamespacesAndTypes(token.SpanStart); } else if (IsCrefQualifiedNameContext(token)) { return GetQualifiedSymbols((QualifiedCrefSyntax)token.Parent, token, semanticModel, cancellationToken); } return SpecializedCollections.EmptyEnumerable<ISymbol>(); } private static IEnumerable<ISymbol> GetUnqualifiedSymbols(SyntaxToken token, SemanticModel semanticModel, CancellationToken cancellationToken) { foreach (var symbol in semanticModel.LookupSymbols(token.SpanStart)) { yield return symbol; } // LookupSymbols doesn't return indexers or operators because they can't be referred to by name. 
// So, try to find the innermost type declaration and return its operators and indexers var typeDeclaration = token.Parent?.FirstAncestorOrSelf<TypeDeclarationSyntax>(); if (typeDeclaration != null) { var type = semanticModel.GetDeclaredSymbol(typeDeclaration, cancellationToken); if (type != null) { foreach (var baseType in type.GetBaseTypesAndThis()) { foreach (var member in baseType.GetMembers()) { if ((member.IsIndexer() || member.IsUserDefinedOperator()) && member.IsAccessibleWithin(type)) { yield return member; } } } } } } private static IEnumerable<ISymbol> GetQualifiedSymbols(QualifiedCrefSyntax parent, SyntaxToken token, SemanticModel semanticModel, CancellationToken cancellationToken) { var leftType = semanticModel.GetTypeInfo(parent.Container, cancellationToken).Type; var leftSymbol = semanticModel.GetSymbolInfo(parent.Container, cancellationToken).Symbol; var container = (leftSymbol ?? leftType) as INamespaceOrTypeSymbol; foreach (var symbol in semanticModel.LookupSymbols(token.SpanStart, container)) { yield return symbol; } var namedTypeContainer = container as INamedTypeSymbol; if (namedTypeContainer != null) { foreach (var instanceConstructor in namedTypeContainer.InstanceConstructors) { yield return instanceConstructor; } } } private static TextSpan GetCompletionItemSpan(SourceText text, int position) { return CommonCompletionUtilities.GetWordSpan( text, position, ch => CompletionUtilities.IsCompletionItemStartCharacter(ch) || ch == '{', ch => CompletionUtilities.IsWordCharacter(ch) || ch == '{' || ch == '}'); } private IEnumerable<CompletionItem> CreateCompletionItems( Workspace workspace, SemanticModel semanticModel, IEnumerable<ISymbol> symbols, SyntaxToken token, TextSpan itemSpan) { var builder = SharedPools.Default<StringBuilder>().Allocate(); try { foreach (var symbol in symbols) { builder.Clear(); yield return CreateItem(workspace, semanticModel, symbol, token, builder); } } finally { SharedPools.Default<StringBuilder>().ClearAndFree(builder); } } private CompletionItem CreateItem( Workspace workspace, SemanticModel semanticModel, ISymbol symbol, SyntaxToken token, StringBuilder builder) { int position = token.SpanStart; if (symbol is INamespaceOrTypeSymbol && token.IsKind(SyntaxKind.DotToken)) { // Handle qualified namespace and type names. builder.Append(symbol.ToDisplayString(QualifiedCrefFormat)); } else { // Handle unqualified namespace and type names, or member names. builder.Append(symbol.ToMinimalDisplayString(semanticModel, position, CrefFormat)); var parameters = symbol.GetParameters(); if (!parameters.IsDefaultOrEmpty) { // Note: we intentionally don't add the "params" modifier for any parameters. builder.Append(symbol.IsIndexer() ? '[' : '('); for (int i = 0; i < parameters.Length; i++) { if (i > 0) { builder.Append(", "); } var parameter = parameters[i]; if (parameter.RefKind == RefKind.Out) { builder.Append("out "); } else if (parameter.RefKind == RefKind.Ref) { builder.Append("ref "); } builder.Append(parameter.Type.ToMinimalDisplayString(semanticModel, position)); } builder.Append(symbol.IsIndexer() ? 
']' : ')'); } } var symbolText = builder.ToString(); var insertionText = builder .Replace('<', '{') .Replace('>', '}') .ToString(); return SymbolCompletionItem.Create( displayText: insertionText, insertionText: insertionText, symbol: symbol, contextPosition: position, sortText: symbolText, rules: GetRules(insertionText)); } public override Task<CompletionDescription> GetDescriptionAsync(Document document, CompletionItem item, CancellationToken cancellationToken) { return SymbolCompletionItem.GetDescriptionAsync(item, document, cancellationToken); } private static readonly CharacterSetModificationRule s_WithoutOpenBrace = CharacterSetModificationRule.Create(CharacterSetModificationKind.Remove, '{'); private static readonly CharacterSetModificationRule s_WithoutOpenParen = CharacterSetModificationRule.Create(CharacterSetModificationKind.Remove, '('); private CompletionItemRules GetRules(string displayText) { var commitRules = ImmutableArray<CharacterSetModificationRule>.Empty; if (displayText.Contains("{")) { commitRules = commitRules.Add(s_WithoutOpenBrace); } if (displayText.Contains("(")) { commitRules = commitRules.Add(s_WithoutOpenParen); } if (commitRules.IsEmpty) { return CompletionItemRules.Default; } else { return CompletionItemRules.Default.WithCommitCharacterRules(commitRules); } } private static readonly string InsertionTextProperty = "insertionText"; protected override Task<TextChange?> GetTextChangeAsync(CompletionItem selectedItem, char? ch, CancellationToken cancellationToken) { string insertionText; if (!selectedItem.Properties.TryGetValue(InsertionTextProperty, out insertionText)) { insertionText = selectedItem.DisplayText; } return Task.FromResult<TextChange?>(new TextChange(selectedItem.Span, insertionText)); } } }
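// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the provider above): CreateItem
// builds the cref insertion text by replacing '<' and '>' with '{' and '}',
// since generic type arguments inside a cref attribute use braces rather than
// raw angle brackets. The tiny helper below isolates just that transformation;
// the name CrefTextSample is hypothetical.
// ---------------------------------------------------------------------------
internal static class CrefTextSample
{
    // "Dictionary<TKey, TValue>.Add(TKey, TValue)"
    //   becomes
    // "Dictionary{TKey, TValue}.Add(TKey, TValue)"
    internal static string ToCrefInsertionText(string displayText)
    {
        return displayText.Replace('<', '{').Replace('>', '}');
    }
}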
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.ComponentModel.Composition; using System.Linq; using System.Threading; using Microsoft.CodeAnalysis.Editor.Commands; using Microsoft.CodeAnalysis.Editor.Host; using Microsoft.CodeAnalysis.Editor.Shared.Extensions; using Microsoft.CodeAnalysis.Editor.Shared.Utilities; using Microsoft.CodeAnalysis.Shared.Extensions; using Microsoft.CodeAnalysis.Text; using Microsoft.CodeAnalysis.Text.Shared.Extensions; using Microsoft.VisualStudio.Text; using Microsoft.VisualStudio.Text.Editor; using Microsoft.VisualStudio.Text.Operations; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.Editor.Implementation.CommentSelection { [ExportCommandHandler(PredefinedCommandHandlerNames.CommentSelection, ContentTypeNames.RoslynContentType)] internal class CommentUncommentSelectionCommandHandler : ICommandHandler<CommentSelectionCommandArgs>, ICommandHandler<UncommentSelectionCommandArgs> { private readonly IWaitIndicator _waitIndicator; private readonly ITextUndoHistoryRegistry _undoHistoryRegistry; private readonly IEditorOperationsFactoryService _editorOperationsFactoryService; [ImportingConstructor] internal CommentUncommentSelectionCommandHandler( IWaitIndicator waitIndicator, ITextUndoHistoryRegistry undoHistoryRegistry, IEditorOperationsFactoryService editorOperationsFactoryService) { Contract.ThrowIfNull(waitIndicator); Contract.ThrowIfNull(undoHistoryRegistry); Contract.ThrowIfNull(editorOperationsFactoryService); _waitIndicator = waitIndicator; _undoHistoryRegistry = undoHistoryRegistry; _editorOperationsFactoryService = editorOperationsFactoryService; } private static CommandState GetCommandState(ITextBuffer buffer, Func<CommandState> nextHandler) { if (!buffer.CanApplyChangeDocumentToWorkspace()) { return nextHandler(); } return CommandState.Available; } public CommandState GetCommandState(CommentSelectionCommandArgs args, Func<CommandState> nextHandler) { return GetCommandState(args.SubjectBuffer, nextHandler); } /// <summary> /// Comment the selected spans, and reset the selection. /// </summary> public void ExecuteCommand(CommentSelectionCommandArgs args, Action nextHandler) { this.ExecuteCommand(args.TextView, args.SubjectBuffer, Operation.Comment); } public CommandState GetCommandState(UncommentSelectionCommandArgs args, Func<CommandState> nextHandler) { return GetCommandState(args.SubjectBuffer, nextHandler); } /// <summary> /// Uncomment the selected spans, and reset the selection. /// </summary> public void ExecuteCommand(UncommentSelectionCommandArgs args, Action nextHandler) { this.ExecuteCommand(args.TextView, args.SubjectBuffer, Operation.Uncomment); } internal void ExecuteCommand(ITextView textView, ITextBuffer subjectBuffer, Operation operation) { var title = operation == Operation.Comment ? EditorFeaturesResources.Comment_Selection : EditorFeaturesResources.Uncomment_Selection; var message = operation == Operation.Comment ? 
EditorFeaturesResources.Commenting_currently_selected_text : EditorFeaturesResources.Uncommenting_currently_selected_text; _waitIndicator.Wait( title, message, allowCancel: false, action: waitContext => { var document = subjectBuffer.CurrentSnapshot.GetOpenDocumentInCurrentContextWithChanges(); if (document == null) { return; } var service = document.GetLanguageService<ICommentUncommentService>(); if (service == null) { return; } var trackingSpans = new List<ITrackingSpan>(); var textChanges = new List<TextChange>(); CollectEdits(service, textView.Selection.GetSnapshotSpansOnBuffer(subjectBuffer), textChanges, trackingSpans, operation); using (var transaction = new CaretPreservingEditTransaction(title, textView, _undoHistoryRegistry, _editorOperationsFactoryService)) { document.Project.Solution.Workspace.ApplyTextChanges(document.Id, textChanges, waitContext.CancellationToken); transaction.Complete(); } if (operation == Operation.Uncomment) { using (var transaction = new CaretPreservingEditTransaction(title, textView, _undoHistoryRegistry, _editorOperationsFactoryService)) { Format(service, subjectBuffer.CurrentSnapshot, trackingSpans, waitContext.CancellationToken); transaction.Complete(); } } if (trackingSpans.Any()) { // TODO, this doesn't currently handle block selection textView.SetSelection(trackingSpans.First().GetSpan(subjectBuffer.CurrentSnapshot)); } }); } private void Format(ICommentUncommentService service, ITextSnapshot snapshot, IEnumerable<ITrackingSpan> changes, CancellationToken cancellationToken) { var document = snapshot.GetOpenDocumentInCurrentContextWithChanges(); if (document == null) { return; } var textSpans = changes.Select(s => s.GetSpan(snapshot)).Select(s => s.Span.ToTextSpan()).ToList(); var newDocument = service.Format(document, textSpans, cancellationToken); newDocument.Project.Solution.Workspace.ApplyDocumentChanges(newDocument, cancellationToken); } internal enum Operation { Comment, Uncomment } /// <summary> /// Add the necessary edits to the given spans. Also collect tracking spans over each span. /// /// Internal so that it can be called by unit tests. /// </summary> internal void CollectEdits(ICommentUncommentService service, NormalizedSnapshotSpanCollection selectedSpans, List<TextChange> textChanges, List<ITrackingSpan> trackingSpans, Operation operation) { foreach (var span in selectedSpans) { if (operation == Operation.Comment) { CommentSpan(service, span, textChanges, trackingSpans); } else { UncommentSpan(service, span, textChanges, trackingSpans); } } } /// <summary> /// Add the necessary edits to comment out a single span. /// </summary> private void CommentSpan(ICommentUncommentService service, SnapshotSpan span, List<TextChange> textChanges, List<ITrackingSpan> trackingSpans) { var firstAndLastLine = DetermineFirstAndLastLine(span); if (span.IsEmpty && firstAndLastLine.Item1.IsEmptyOrWhitespace()) { return; } if (!span.IsEmpty && string.IsNullOrWhiteSpace(span.GetText())) { return; } if (span.IsEmpty || string.IsNullOrWhiteSpace(span.GetText())) { var firstNonWhitespaceOnLine = firstAndLastLine.Item1.GetFirstNonWhitespacePosition(); var insertPosition = firstNonWhitespaceOnLine.HasValue ? 
firstNonWhitespaceOnLine.Value : firstAndLastLine.Item1.Start; // If there isn't a selection, we select the whole line trackingSpans.Add(span.Snapshot.CreateTrackingSpan(Span.FromBounds(firstAndLastLine.Item1.Start, firstAndLastLine.Item1.End), SpanTrackingMode.EdgeInclusive)); InsertText(textChanges, insertPosition, service.SingleLineCommentString); } else { if (service.SupportsBlockComment && !SpanIncludesAllTextOnIncludedLines(span) && firstAndLastLine.Item1.LineNumber == firstAndLastLine.Item2.LineNumber) { trackingSpans.Add(span.Snapshot.CreateTrackingSpan(span, SpanTrackingMode.EdgeInclusive)); InsertText(textChanges, span.Start, service.BlockCommentStartString); InsertText(textChanges, span.End, service.BlockCommentEndString); } else { // Select the entirety of the lines, so that another comment operation will add more comments, not insert block comments. trackingSpans.Add(span.Snapshot.CreateTrackingSpan(Span.FromBounds(firstAndLastLine.Item1.Start.Position, firstAndLastLine.Item2.End.Position), SpanTrackingMode.EdgeInclusive)); var indentToCommentAt = DetermineSmallestIndent(span, firstAndLastLine); ApplyCommentToNonBlankLines(service, textChanges, firstAndLastLine, indentToCommentAt); } } } /// <summary> /// Record "Insert text" text changes. /// </summary> private void InsertText(List<TextChange> textChanges, int position, string text) { textChanges.Add(new TextChange(new TextSpan(position, 0), text)); } /// <summary> /// Record "Delete text" text changes. /// </summary> private void DeleteText(List<TextChange> textChanges, TextSpan span) { textChanges.Add(new TextChange(span, string.Empty)); } /// <summary> /// Add the necessary edits to uncomment out a single span. /// </summary> private void UncommentSpan(ICommentUncommentService service, SnapshotSpan span, List<TextChange> textChanges, List<ITrackingSpan> spansToSelect) { if (TryUncommentSingleLineComments(service, span, textChanges, spansToSelect)) { return; } TryUncommentContainingBlockComment(service, span, textChanges, spansToSelect); } private bool TryUncommentContainingBlockComment(ICommentUncommentService service, SnapshotSpan span, List<TextChange> textChanges, List<ITrackingSpan> spansToSelect) { // We didn't make any single line changes. If the language supports block comments, see // if we're inside a containing block comment and uncomment that. if (!service.SupportsBlockComment) { return false; } var positionOfStart = -1; var positionOfEnd = -1; var spanText = span.GetText(); var trimmedSpanText = spanText.Trim(); // See if the selection includes just a block comment (plus whitespace) if (trimmedSpanText.StartsWith(service.BlockCommentStartString, StringComparison.Ordinal) && trimmedSpanText.EndsWith(service.BlockCommentEndString, StringComparison.Ordinal)) { positionOfStart = span.Start + spanText.IndexOf(service.BlockCommentStartString, StringComparison.Ordinal); positionOfEnd = span.Start + spanText.LastIndexOf(service.BlockCommentEndString, StringComparison.Ordinal); } else { // See if we are (textually) contained in a block comment. // This could allow a selection that spans multiple block comments to uncomment the beginning of // the first and end of the last. Oh well. var text = span.Snapshot.AsText(); positionOfStart = text.LastIndexOf(service.BlockCommentStartString, span.Start, caseSensitive: true); // If we found a start comment marker, make sure there isn't an end comment marker after it but before our span. 
if (positionOfStart >= 0) { var lastEnd = text.LastIndexOf(service.BlockCommentEndString, span.Start, caseSensitive: true); if (lastEnd < positionOfStart) { positionOfEnd = text.IndexOf(service.BlockCommentEndString, span.End, caseSensitive: true); } else if (lastEnd + service.BlockCommentEndString.Length > span.End) { // The end of the span is *inside* the end marker, so searching backwards found it. positionOfEnd = lastEnd; } } } if (positionOfStart < 0 || positionOfEnd < 0) { return false; } spansToSelect.Add(span.Snapshot.CreateTrackingSpan(Span.FromBounds(positionOfStart, positionOfEnd + service.BlockCommentEndString.Length), SpanTrackingMode.EdgeExclusive)); DeleteText(textChanges, new TextSpan(positionOfStart, service.BlockCommentStartString.Length)); DeleteText(textChanges, new TextSpan(positionOfEnd, service.BlockCommentEndString.Length)); return true; } private bool TryUncommentSingleLineComments(ICommentUncommentService service, SnapshotSpan span, List<TextChange> textChanges, List<ITrackingSpan> spansToSelect) { // First see if we're selecting any lines that have the single-line comment prefix. // If so, then we'll just remove the single-line comment prefix from those lines. var firstAndLastLine = DetermineFirstAndLastLine(span); for (int lineNumber = firstAndLastLine.Item1.LineNumber; lineNumber <= firstAndLastLine.Item2.LineNumber; ++lineNumber) { var line = span.Snapshot.GetLineFromLineNumber(lineNumber); var lineText = line.GetText(); if (lineText.Trim().StartsWith(service.SingleLineCommentString, StringComparison.Ordinal)) { DeleteText(textChanges, new TextSpan(line.Start.Position + lineText.IndexOf(service.SingleLineCommentString, StringComparison.Ordinal), service.SingleLineCommentString.Length)); } } // If we made any changes, select the entirety of the lines we change, so that subsequent invocations will // affect the same lines. if (!textChanges.Any()) { return false; } spansToSelect.Add(span.Snapshot.CreateTrackingSpan(Span.FromBounds(firstAndLastLine.Item1.Start.Position, firstAndLastLine.Item2.End.Position), SpanTrackingMode.EdgeExclusive)); return true; } /// <summary> /// Adds edits to comment out each non-blank line, at the given indent. /// </summary> private void ApplyCommentToNonBlankLines(ICommentUncommentService service, List<TextChange> textChanges, Tuple<ITextSnapshotLine, ITextSnapshotLine> firstAndLastLine, int indentToCommentAt) { for (int lineNumber = firstAndLastLine.Item1.LineNumber; lineNumber <= firstAndLastLine.Item2.LineNumber; ++lineNumber) { var line = firstAndLastLine.Item1.Snapshot.GetLineFromLineNumber(lineNumber); if (!line.IsEmptyOrWhitespace()) { InsertText(textChanges, line.Start + indentToCommentAt, service.SingleLineCommentString); } } } /// <summary> Given a set of lines, find the minimum indent of all of the non-blank, non-whitespace lines.</summary> private static int DetermineSmallestIndent(SnapshotSpan span, Tuple<ITextSnapshotLine, ITextSnapshotLine> firstAndLastLine) { // TODO: This breaks if you have mixed tabs/spaces, and/or tabsize != indentsize. var indentToCommentAt = int.MaxValue; for (int lineNumber = firstAndLastLine.Item1.LineNumber; lineNumber <= firstAndLastLine.Item2.LineNumber; ++lineNumber) { var line = span.Snapshot.GetLineFromLineNumber(lineNumber); var firstNonWhitespacePosition = line.GetFirstNonWhitespacePosition(); var firstNonWhitespaceOnLine = firstNonWhitespacePosition.HasValue ? 
firstNonWhitespacePosition.Value - line.Start : int.MaxValue; indentToCommentAt = Math.Min(indentToCommentAt, firstNonWhitespaceOnLine); } return indentToCommentAt; } /// <summary> /// Given a span, find the first and last line that are part of the span. NOTE: If the span ends in column zero, /// we back up to the previous line, to handle the case where the user used shift + down to select a bunch of /// lines. They probably don't want the last line commented in that case. /// </summary> private static Tuple<ITextSnapshotLine, ITextSnapshotLine> DetermineFirstAndLastLine(SnapshotSpan span) { var firstLine = span.Snapshot.GetLineFromPosition(span.Start.Position); var lastLine = span.Snapshot.GetLineFromPosition(span.End.Position); if (lastLine.Start == span.End.Position && !span.IsEmpty) { lastLine = lastLine.GetPreviousMatchingLine(_ => true); } return Tuple.Create(firstLine, lastLine); } /// <summary> /// Returns true if the span includes all of the non-whitespace text on the first and last line. /// </summary> private static bool SpanIncludesAllTextOnIncludedLines(SnapshotSpan span) { var firstAndLastLine = DetermineFirstAndLastLine(span); var firstNonWhitespacePosition = firstAndLastLine.Item1.GetFirstNonWhitespacePosition(); var lastNonWhitespacePosition = firstAndLastLine.Item2.GetLastNonWhitespacePosition(); var allOnFirst = !firstNonWhitespacePosition.HasValue || span.Start.Position <= firstNonWhitespacePosition.Value; var allOnLast = !lastNonWhitespacePosition.HasValue || span.End.Position > lastNonWhitespacePosition.Value; return allOnFirst && allOnLast; } } }
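// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the command handler above): when a
// block comment cannot be used, CommentSpan inserts the single-line comment
// prefix at the smallest indent of the selected non-blank lines (see
// DetermineSmallestIndent and ApplyCommentToNonBlankLines). The stand-alone
// helper below applies the same rule to an array of text lines; the names
// LineCommentSample and CommentOutLines are hypothetical, and the sketch shares
// the original's caveat about mixed tabs and spaces.
// ---------------------------------------------------------------------------
using System.Linq;

internal static class LineCommentSample
{
    internal static string[] CommentOutLines(string[] lines, string commentPrefix)
    {
        // Smallest indent across the non-blank lines; blank lines are ignored
        // here and left untouched below, as in ApplyCommentToNonBlankLines.
        int indent = lines
            .Where(line => !string.IsNullOrWhiteSpace(line))
            .Select(line => line.Length - line.TrimStart().Length)
            .DefaultIfEmpty(0)
            .Min();

        return lines
            .Select(line => string.IsNullOrWhiteSpace(line)
                ? line
                : line.Substring(0, indent) + commentPrefix + line.Substring(indent))
            .ToArray();
    }
}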
// --------------------------------------------------------------------------- // <copyright file="Session.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> // --------------------------------------------------------------------------- // --------------------------------------------------------------------- // <summary> // </summary> // --------------------------------------------------------------------- namespace Microsoft.Database.Isam { using System; using Microsoft.Isam.Esent.Interop; /// <summary> /// A Session is the transactional context for the ISAM. It can be used to /// begin, commit, or abort transactions that influence when changes made /// to databases are kept or discarded. /// <para> /// The session object currently also controls which databases can be /// accessed by the ISAM. /// </para> /// </summary> public class IsamSession : IDisposable { /// <summary> /// The instance /// </summary> private readonly IsamInstance isamInstance; /// <summary> /// The sesid /// </summary> private readonly JET_SESID sesid; /// <summary> /// The cleanup /// </summary> private bool cleanup = false; /// <summary> /// The disposed /// </summary> private bool disposed = false; /// <summary> /// The temporary database /// </summary> private TemporaryDatabase temporaryDatabase = null; /// <summary> /// The transaction level /// </summary> private long transactionLevel = 0; /// <summary> /// The transaction level identifier /// </summary> private long[] transactionLevelID = null; /// <summary> /// Initializes a new instance of the <see cref="IsamSession"/> class. /// </summary> /// <param name="isamInstance">The instance.</param> internal IsamSession(IsamInstance isamInstance) { lock (isamInstance) { this.isamInstance = isamInstance; Api.JetBeginSession(isamInstance.Inst, out this.sesid, null, null); this.cleanup = true; this.transactionLevelID = new long[7]; // JET only supports 7 levels } } /// <summary> /// Finalizes an instance of the IsamSession class. /// </summary> ~IsamSession() { this.Dispose(false); } /// <summary> /// Gets the instance that created this session. /// </summary> public IsamInstance IsamInstance { get { return this.isamInstance; } } /// <summary> /// Gets the ID of the session's current transaction. /// </summary> /// <remarks> /// The transaction ID is incremented every time the session's current /// transaction save point (level) reaches zero such that the session /// is no longer considered to be in a transaction. /// </remarks> public long TransactionID { get { this.CheckDisposed(); return this.transactionLevelID[0]; } } /// <summary> /// Gets the save point (level) of the session's current transaction. /// </summary> /// <remarks> /// Every time a new transaction is begun, the save point (level) of /// the session's current transaction is increased. Every time a /// transaction is successfully committed or aborted, the save point /// (level) of the session's current transaction is decreased. If the /// save point (level) of the session's current transaction is zero /// then the session is not considered to be in a transaction. /// However, individual operations performed using the session will /// still be in a transaction. /// </remarks> public long TransactionLevel { get { this.CheckDisposed(); return this.transactionLevel; } } /// <summary> /// Gets the sesid. /// </summary> /// <value> /// The sesid. 
/// </value> internal JET_SESID Sesid { get { return this.sesid; } } /// <summary> /// Gets or sets a value indicating whether [disposed]. /// </summary> /// <value> /// <c>true</c> if [disposed]; otherwise, <c>false</c>. /// </value> internal bool Disposed { get { return this.disposed || this.isamInstance.Disposed; } set { this.disposed = value; } } /// <summary> /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. /// </summary> public void Dispose() { lock (this) { this.Dispose(true); } GC.SuppressFinalize(this); } /// <summary> /// Creates a new database at the specified location /// </summary> /// <param name="databaseName">The file name (relative or absolute) at which the database will be created</param> /// <remarks> /// The new database will automatically be attached to the instance. /// See Session.AttachDatabase for more information. /// </remarks> public void CreateDatabase(string databaseName) { lock (this) { this.CheckDisposed(); JET_DBID dbid; Api.JetCreateDatabase(this.Sesid, databaseName, null, out dbid, CreateDatabaseGrbit.None); Api.JetCloseDatabase(this.Sesid, dbid, CloseDatabaseGrbit.None); } } /// <summary> /// Attaches an existing database at the specified location /// </summary> /// <param name="databaseName">The file name (relative or absolute) at which the database will be attached</param> /// <remarks> /// Attaching a database to the instance enables that database to be /// opened for access. When a database is attached, its file is /// opened and so must be available to be locked as required. The /// file will be held open until the database is detached or until the /// instance is disposed. /// </remarks> public void AttachDatabase(string databaseName) { lock (this) { this.CheckDisposed(); AttachDatabaseGrbit grbit = AttachDatabaseGrbit.None; if (this.isamInstance.ReadOnly) { grbit |= AttachDatabaseGrbit.ReadOnly; } Api.JetAttachDatabase(this.Sesid, databaseName, grbit); } } /// <summary> /// Detaches an attached database at the specified location /// </summary> /// <param name="databaseName">The file name (relative or absolute) at which the database will be detached</param> /// <remarks> /// Detaching a database from the instance will close its file and /// will make it no longer possible to open that database. /// </remarks> public void DetachDatabase(string databaseName) { lock (this) { this.CheckDisposed(); Api.JetDetachDatabase(this.Sesid, databaseName); } } /// <summary> /// Determines if there is a database at the specified location /// </summary> /// <param name="databaseName">The file name (relative or absolute) at which the database may exist</param> /// <returns>true if the database exists and is a valid database file, false otherwise</returns> public bool Exists(string databaseName) { lock (this) { this.CheckDisposed(); try { AttachDatabaseGrbit grbit = AttachDatabaseGrbit.None; if (this.isamInstance.ReadOnly) { grbit |= AttachDatabaseGrbit.ReadOnly; } Api.JetAttachDatabase(this.Sesid, databaseName, grbit); try { Api.JetDetachDatabase(this.Sesid, databaseName); } catch (EsentDatabaseInUseException) { } return true; } catch (EsentFileNotFoundException) { return false; } catch (EsentDatabaseInvalidPathException) { return false; } catch (EsentErrorException) { return true; } } } /// <summary> /// Opens the database. 
/// </summary> /// <param name="databaseName">The file name (relative or absolute) at which the database will be opened</param> /// <returns> /// a Database object representing the database for this session /// </returns> /// <remarks> /// A database must first be attached (or created) before it can be /// opened successfully. /// </remarks> public IsamDatabase OpenDatabase(string databaseName) { lock (this) { this.CheckDisposed(); return new IsamDatabase(this, databaseName); } } /// <summary> /// Opens the temporary database. /// </summary> /// <returns> /// A TemporaryDatabase object representing the temporary database for this session /// </returns> /// <remarks> /// Only one temporary database is supported per instance. /// </remarks> public TemporaryDatabase OpenTemporaryDatabase() { lock (this) { this.CheckDisposed(); if (this.temporaryDatabase == null) { this.temporaryDatabase = new TemporaryDatabase(this); } return this.temporaryDatabase; } } /// <summary> /// Begins a new save point (level) for the current transaction on this /// session. Any changes made to the database for this save point /// (level) may later be kept or discarded by committing or aborting the /// save point (level). /// </summary> /// <remarks> /// Currently, there is a limit to how many save points (levels) are /// supported by the ISAM. Approximately seven save points (levels) /// are supported. Some ISAM functions also use some of these so the /// effective limit will vary with circumstance. /// </remarks> public void BeginTransaction() { lock (this) { this.CheckDisposed(); Api.JetBeginTransaction(this.Sesid); this.transactionLevel++; } } /// <summary> /// Commits the current save point (level) of the current transaction /// on this session. All changes made to the database for this save /// point (level) will be kept. /// </summary> /// <remarks> /// <para> /// It is illegal to call this method when the session is not currently /// in a transaction. Use Session.TransactionLevel to determine the /// current transaction state of a session. /// </para> /// <para> /// Changes made to the database will become permanent if and only if /// those changes are committed to save point (level) zero. /// </para> /// <para> /// A commit to save point (level) zero is guaranteed to be persisted /// to the database upon completion of this method. /// </para> /// </remarks> public void CommitTransaction() { this.CommitTransaction(true); } /// <summary> /// Commits the current save point (level) of the current transaction /// on this session. All changes made to the database for this save /// point (level) will be kept. /// </summary> /// <param name="durableCommit"> /// When true, a commit to save point (level) zero is guaranteed to be /// persisted to the database upon completion of this method. /// </param> /// <remarks> /// <para> /// It is illegal to call this method when the session is not currently /// in a transaction. Use Session.TransactionLevel to determine the /// current transaction state of a session. /// </para> /// <para> /// A commit to save point (level) zero is guaranteed to be persisted /// to the database upon completion of this method only if /// durableCommit is true. If durableCommit is false then the changes /// will only be persisted to the database if their transaction log /// entries happen to be written to disk before a crash or if the /// database is shut down cleanly. 
/// </para> /// </remarks> public void CommitTransaction(bool durableCommit) { lock (this) { this.CheckDisposed(); CommitTransactionGrbit grbit = CommitTransactionGrbit.None; if (!durableCommit) { grbit |= CommitTransactionGrbit.LazyFlush; } Api.JetCommitTransaction(this.Sesid, grbit); this.transactionLevelID[--this.transactionLevel]++; } } /// <summary> /// Aborts the current save point (level) of the current transaction on /// this session. All changes made to the database for this save point /// (level) will be discarded. /// </summary> /// <remarks> /// It is illegal to call this method when the session is not currently /// in a transaction. Use Session.TransactionLevel to determine the /// current transaction state of a session. /// </remarks> public void RollbackTransaction() { lock (this) { this.CheckDisposed(); Api.JetRollback(this.Sesid, RollbackTransactionGrbit.None); this.transactionLevelID[--this.transactionLevel]++; } } /// <summary> /// Aborts the current save point (level) of the current transaction on /// this session. All changes made to the database for this save point /// (level) will be discarded. /// </summary> /// <remarks> /// It is illegal to call this method when the session is not currently /// in a transaction. Use Session.TransactionLevel to determine the /// current transaction state of a session. /// <para> /// Session.AbortTransaction is an alias for /// <see cref="IsamSession.RollbackTransaction"/>. /// </para> /// </remarks> public void AbortTransaction() { this.RollbackTransaction(); } /// <summary> /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. /// </summary> void IDisposable.Dispose() { this.Dispose(); } /// <summary>Gets the Transaction ID at the specified Transaction Level. /// </summary> /// <param name="level">The Transaction Level.</param> /// <returns>The Transaction ID at the specified Transaction Level.</returns> internal long TransactionLevelID(long level) { return this.transactionLevelID[level - 1]; } /// <summary> /// Releases unmanaged and - optionally - managed resources. /// </summary> /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param> protected virtual void Dispose(bool disposing) { lock (this) { if (!this.Disposed) { if (this.cleanup) { if (this.temporaryDatabase != null) { this.temporaryDatabase.Dispose(); } Api.JetEndSession(this.sesid, EndSessionGrbit.None); this.cleanup = false; } this.Disposed = true; } } } /// <summary> /// Checks whether this object is disposed. /// </summary> /// <exception cref="System.ObjectDisposedException">If the object has already been disposed.</exception> private void CheckDisposed() { lock (this) { if (this.Disposed) { throw new ObjectDisposedException(this.GetType().Name); } } } } }
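// ----------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the sources above). A minimal example of the
// IsamSession surface defined above: create or attach a database, open it, and wrap
// work in a save point via BeginTransaction / CommitTransaction(bool). How the session
// is obtained is an assumption here (typically from the owning IsamInstance, which
// this file does not show); the namespace is assumed to be Microsoft.Database.Isam.
// ----------------------------------------------------------------------------------
using Microsoft.Database.Isam;

internal static class IsamSessionUsageSketch
{
    // 'session' is assumed to come from the owning IsamInstance.
    internal static void Run(IsamSession session)
    {
        if (!session.Exists("demo.edb"))
        {
            session.CreateDatabase("demo.edb");   // creates and implicitly attaches the database
        }
        else
        {
            session.AttachDatabase("demo.edb");
        }

        IsamDatabase database = session.OpenDatabase("demo.edb");

        session.BeginTransaction();               // open save point (level) 1
        try
        {
            // ... perform updates through 'database' here ...
            session.CommitTransaction(durableCommit: false);  // lazy commit back to level 0
        }
        catch
        {
            session.RollbackTransaction();        // discard the save point on failure
            throw;
        }

        session.DetachDatabase("demo.edb");
    }
}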
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Internal.NativeCrypto; namespace System.Security.Cryptography { #if INTERNAL_ASYMMETRIC_IMPLEMENTATIONS internal static partial class ECDiffieHellmanImplementation { #endif public sealed partial class ECDiffieHellmanCng : ECDiffieHellman { public override void ImportParameters(ECParameters parameters) { parameters.Validate(); ECCurve curve = parameters.Curve; bool includePrivateParameters = (parameters.D != null); if (curve.IsPrime) { byte[] ecExplicitBlob = ECCng.GetPrimeCurveBlob(ref parameters, ecdh: true); ImportFullKeyBlob(ecExplicitBlob, includePrivateParameters); } else if (curve.IsNamed) { // FriendlyName is required; an attempt was already made to default it in ECCurve if (string.IsNullOrEmpty(curve.Oid.FriendlyName)) { throw new PlatformNotSupportedException( SR.Format(SR.Cryptography_InvalidCurveOid, curve.Oid.Value)); } byte[] ecNamedCurveBlob = ECCng.GetNamedCurveBlob(ref parameters, ecdh: true); ImportKeyBlob(ecNamedCurveBlob, curve.Oid.FriendlyName, includePrivateParameters); } else { throw new PlatformNotSupportedException( SR.Format(SR.Cryptography_CurveNotSupported, curve.CurveType.ToString())); } } public override ECParameters ExportExplicitParameters(bool includePrivateParameters) { byte[] blob = ExportFullKeyBlob(includePrivateParameters); try { ECParameters ecparams = new ECParameters(); ECCng.ExportPrimeCurveParameters(ref ecparams, blob, includePrivateParameters); return ecparams; } finally { Array.Clear(blob, 0, blob.Length); } } public override ECParameters ExportParameters(bool includePrivateParameters) { ECParameters ecparams = new ECParameters(); string curveName = GetCurveName(out string oidValue); byte[] blob = null; try { if (string.IsNullOrEmpty(curveName)) { blob = ExportFullKeyBlob(includePrivateParameters); ECCng.ExportPrimeCurveParameters(ref ecparams, blob, includePrivateParameters); } else { blob = ExportKeyBlob(includePrivateParameters); ECCng.ExportNamedCurveParameters(ref ecparams, blob, includePrivateParameters); ecparams.Curve = ECCurve.CreateFromOid(new Oid(oidValue, curveName)); } return ecparams; } finally { if (blob != null) { Array.Clear(blob, 0, blob.Length); } } } public override void ImportPkcs8PrivateKey(ReadOnlySpan<byte> source, out int bytesRead) { CngPkcs8.Pkcs8Response response = CngPkcs8.ImportPkcs8PrivateKey(source, out int localRead); ProcessPkcs8Response(response); bytesRead = localRead; } public override void ImportEncryptedPkcs8PrivateKey( ReadOnlySpan<byte> passwordBytes, ReadOnlySpan<byte> source, out int bytesRead) { CngPkcs8.Pkcs8Response response = CngPkcs8.ImportEncryptedPkcs8PrivateKey( passwordBytes, source, out int localRead); ProcessPkcs8Response(response); bytesRead = localRead; } public override void ImportEncryptedPkcs8PrivateKey( ReadOnlySpan<char> password, ReadOnlySpan<byte> source, out int bytesRead) { CngPkcs8.Pkcs8Response response = CngPkcs8.ImportEncryptedPkcs8PrivateKey( password, source, out int localRead); ProcessPkcs8Response(response); bytesRead = localRead; } private void ProcessPkcs8Response(CngPkcs8.Pkcs8Response response) { // Wrong algorithm?
if (response.GetAlgorithmGroup() != BCryptNative.AlgorithmName.ECDH) { response.FreeKey(); throw new CryptographicException(SR.Cryptography_NotValidPublicOrPrivateKey); } AcceptImport(response); } public override byte[] ExportEncryptedPkcs8PrivateKey( ReadOnlySpan<byte> passwordBytes, PbeParameters pbeParameters) { if (pbeParameters == null) throw new ArgumentNullException(nameof(pbeParameters)); return CngPkcs8.ExportEncryptedPkcs8PrivateKey( this, passwordBytes, pbeParameters); } public override byte[] ExportEncryptedPkcs8PrivateKey( ReadOnlySpan<char> password, PbeParameters pbeParameters) { if (pbeParameters == null) { throw new ArgumentNullException(nameof(pbeParameters)); } PasswordBasedEncryption.ValidatePbeParameters( pbeParameters, password, ReadOnlySpan<byte>.Empty); if (CngPkcs8.IsPlatformScheme(pbeParameters)) { return ExportEncryptedPkcs8(password, pbeParameters.IterationCount); } return CngPkcs8.ExportEncryptedPkcs8PrivateKey( this, password, pbeParameters); } public override bool TryExportEncryptedPkcs8PrivateKey( ReadOnlySpan<byte> passwordBytes, PbeParameters pbeParameters, Span<byte> destination, out int bytesWritten) { if (pbeParameters == null) throw new ArgumentNullException(nameof(pbeParameters)); PasswordBasedEncryption.ValidatePbeParameters( pbeParameters, ReadOnlySpan<char>.Empty, passwordBytes); return CngPkcs8.TryExportEncryptedPkcs8PrivateKey( this, passwordBytes, pbeParameters, destination, out bytesWritten); } public override bool TryExportEncryptedPkcs8PrivateKey( ReadOnlySpan<char> password, PbeParameters pbeParameters, Span<byte> destination, out int bytesWritten) { if (pbeParameters == null) throw new ArgumentNullException(nameof(pbeParameters)); PasswordBasedEncryption.ValidatePbeParameters( pbeParameters, password, ReadOnlySpan<byte>.Empty); if (CngPkcs8.IsPlatformScheme(pbeParameters)) { return TryExportEncryptedPkcs8( password, pbeParameters.IterationCount, destination, out bytesWritten); } return CngPkcs8.TryExportEncryptedPkcs8PrivateKey( this, password, pbeParameters, destination, out bytesWritten); } } #if INTERNAL_ASYMMETRIC_IMPLEMENTATIONS } #endif }
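// ----------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the product sources above). A small round
// trip over the public surface implemented above: export public-only ECParameters
// from one party, re-import them via ImportParameters (exercising the named-curve
// path), and confirm both sides derive the same key material. Only public
// System.Security.Cryptography APIs are used; nistP256 is an arbitrary curve choice.
// ----------------------------------------------------------------------------------
using System;
using System.Security.Cryptography;

internal static class EcdhParametersSketch
{
    internal static void Run()
    {
        using (ECDiffieHellman alice = ECDiffieHellman.Create(ECCurve.NamedCurves.nistP256))
        using (ECDiffieHellman bob = ECDiffieHellman.Create(ECCurve.NamedCurves.nistP256))
        using (ECDiffieHellman bobPublicCopy = ECDiffieHellman.Create())
        {
            // Public-only export (D is null), so ImportParameters takes the named-curve
            // branch without private key material.
            ECParameters bobPublic = bob.ExportParameters(includePrivateParameters: false);
            bobPublicCopy.ImportParameters(bobPublic);

            byte[] fromAlice = alice.DeriveKeyMaterial(bobPublicCopy.PublicKey);
            byte[] fromBob = bob.DeriveKeyMaterial(alice.PublicKey);

            Console.WriteLine(Convert.ToBase64String(fromAlice) == Convert.ToBase64String(fromBob)); // True
        }
    }
}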
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Buffers; using System.Collections.Generic; using System.Linq; using System.Memory.Tests.SequenceReader; using Xunit; namespace System.Text.Tests { public class EncodingExtensionsTests { private static readonly char[] AllScalarsAsUtf16 = CreateAllScalarsAsUtf16(); // 2,160,640 chars private static readonly byte[] AllScalarsAsUtf8 = Encoding.UTF8.GetBytes(AllScalarsAsUtf16); // 4,382,592 bytes private static char[] CreateAllScalarsAsUtf16() { List<char> list = new List<char>(2_160_640); // Add U+0000 .. U+D7FF for (int i = 0; i < 0xD800; i++) { list.Add((char)i); } // Add U+E000 .. U+10FFFF Span<char> scratch = stackalloc char[2]; // max UTF-16 sequence length for (int i = 0xE000; i <= 0x10FFFF; i++) { foreach (char ch in scratch.Slice(0, new Rune(i).EncodeToUtf16(scratch))) { list.Add(ch); } } char[] allScalarsAsChars = list.ToArray(); // U+0000 .. U+D7FF = 55,296 1-char sequences // U+E000 .. U+FFFF = 8,192 1-char sequences // U+10000 .. U+10FFFF = 1,048,576 2-char sequences // total = 2,160,640 chars to encode all scalars as UTF-16 // // U+0000 .. U+007F = 128 1-byte sequences // U+0080 .. U+07FF = 1,920 2-byte sequences // U+0800 .. U+D7FF = 53,247 3-byte sequences // U+E000 .. U+FFFF = 8,192 3-byte sequences // U+10000 .. U+10FFFF = 1,048,576 4-byte sequences // total = 4,382,592 bytes to encode all scalars as UTF-8 Assert.Equal(2_160_640, allScalarsAsChars.Length); Assert.Equal(4_382_592, Encoding.UTF8.GetByteCount(allScalarsAsChars)); return allScalarsAsChars; } [Fact] public static void Convert_Decoder_ReadOnlySpan_IBufferWriter_ParamChecks() { Decoder decoder = Encoding.UTF8.GetDecoder(); IBufferWriter<char> writer = new ArrayBufferWriter<char>(); Assert.Throws<ArgumentNullException>("decoder", () => EncodingExtensions.Convert((Decoder)null, ReadOnlySpan<byte>.Empty, writer, true, out _, out _)); Assert.Throws<ArgumentNullException>("writer", () => EncodingExtensions.Convert(decoder, ReadOnlySpan<byte>.Empty, (IBufferWriter<char>)null, true, out _, out _)); } [Fact] public static void Convert_Decoder_ReadOnlySpan_IBufferWriter() { Decoder decoder = Encoding.UTF8.GetDecoder(); ArrayBufferWriter<char> writer = new ArrayBufferWriter<char>(); // First, a small input with no flushing and no leftover data. ReadOnlySpan<byte> inputData = Encoding.UTF8.GetBytes("Hello"); EncodingExtensions.Convert(decoder, inputData, writer, flush: false, out long charsUsed, out bool completed); Assert.Equal(5, charsUsed); Assert.True(completed); // Then, a large input with no flushing and leftover data. inputData = Encoding.UTF8.GetBytes(new string('x', 20_000_000)).Concat(new byte[] { 0xE0, 0xA0 }).ToArray(); EncodingExtensions.Convert(decoder, inputData, writer, flush: false, out charsUsed, out completed); Assert.Equal(20_000_000, charsUsed); Assert.False(completed); // Then, a large input with flushing and leftover data (should be replaced). inputData = new byte[] { 0x80 }.Concat(Encoding.UTF8.GetBytes(new string('x', 20_000_000))).Concat(new byte[] { 0xE0 }).ToArray(); EncodingExtensions.Convert(decoder, inputData, writer, flush: true, out charsUsed, out completed); Assert.Equal(20_000_002, charsUsed); // 1 for leftover at beginning, 1 for replacement at end Assert.True(completed); // Now make sure all of the data was decoded properly. 
Assert.Equal( expected: "Hello" + new string('x', 20_000_000) + '\u0800' + new string('x', 20_000_000) + '\ufffd', actual: writer.WrittenSpan.ToString()); } [Fact] public static void Convert_Decoder_ReadOnlySequence_IBufferWriter_ParamChecks() { Decoder decoder = Encoding.UTF8.GetDecoder(); IBufferWriter<char> writer = new ArrayBufferWriter<char>(); Assert.Throws<ArgumentNullException>("decoder", () => EncodingExtensions.Convert((Decoder)null, ReadOnlySequence<byte>.Empty, writer, true, out _, out _)); Assert.Throws<ArgumentNullException>("writer", () => EncodingExtensions.Convert(decoder, ReadOnlySequence<byte>.Empty, (IBufferWriter<char>)null, true, out _, out _)); } [Fact] public static void Convert_Decoder_ReadOnlySequence_IBufferWriter() { Decoder decoder = Encoding.UTF8.GetDecoder(); ArrayBufferWriter<char> writer = new ArrayBufferWriter<char>(); // First, input with no flushing and no leftover data. ReadOnlySequence<byte> inputData = SequenceFactory.Create( new byte[] { 0x20 }, // U+0020 new byte[] { 0x61, 0xC2 }, // U+0061 and U+0080 (continues on next line) new byte[] { 0x80, 0xED, 0x9F, 0xBF }); // (cont.) + U+D7FF EncodingExtensions.Convert(decoder, inputData, writer, flush: false, out long charsUsed, out bool completed); Assert.Equal(4, charsUsed); Assert.True(completed); // Then, input with no flushing and leftover data. inputData = SequenceFactory.Create( new byte[] { 0xF4, 0x80 }); // U+100000 (continues on next line) EncodingExtensions.Convert(decoder, inputData, writer, flush: false, out charsUsed, out completed); Assert.Equal(0, charsUsed); Assert.False(completed); // Then, input with flushing and leftover data (should be replaced). inputData = SequenceFactory.Create( new byte[] { 0x80, 0x80 }, // (cont.) new byte[] { 0xC2 }); // leftover data (should be replaced) EncodingExtensions.Convert(decoder, inputData, writer, flush: true, out charsUsed, out completed); Assert.Equal(3, charsUsed); Assert.True(completed); // Now make sure all of the data was decoded properly. Assert.Equal("\u0020\u0061\u0080\ud7ff\U00100000\ufffd", writer.WrittenSpan.ToString()); } [Fact] public static void Convert_Encoder_ReadOnlySpan_IBufferWriter_ParamChecks() { Encoder encoder = Encoding.UTF8.GetEncoder(); IBufferWriter<byte> writer = new ArrayBufferWriter<byte>(); Assert.Throws<ArgumentNullException>("encoder", () => EncodingExtensions.Convert((Encoder)null, ReadOnlySpan<char>.Empty, writer, true, out _, out _)); Assert.Throws<ArgumentNullException>("writer", () => EncodingExtensions.Convert(encoder, ReadOnlySpan<char>.Empty, (IBufferWriter<byte>)null, true, out _, out _)); } [Fact] public static void Convert_Encoder_ReadOnlySpan_IBufferWriter() { Encoder encoder = Encoding.UTF8.GetEncoder(); ArrayBufferWriter<byte> writer = new ArrayBufferWriter<byte>(); // First, a small input with no flushing and no leftover data. ReadOnlySpan<char> inputData = "Hello"; EncodingExtensions.Convert(encoder, inputData, writer, flush: false, out long bytesUsed, out bool completed); Assert.Equal(5, bytesUsed); Assert.True(completed); // Then, a large input with no flushing and leftover data. inputData = new string('x', 20_000_000) + '\ud800'; EncodingExtensions.Convert(encoder, inputData, writer, flush: false, out bytesUsed, out completed); Assert.Equal(20_000_000, bytesUsed); Assert.False(completed); // Then, a large input with flushing and leftover data (should be replaced). 
inputData = '\udc00' + new string('x', 20_000_000) + '\ud800'; EncodingExtensions.Convert(encoder, inputData, writer, flush: true, out bytesUsed, out completed); Assert.Equal(20_000_007, bytesUsed); // 4 for supplementary at beginning, 3 for replacement at end Assert.True(completed); // Now make sure all of the data was encoded properly. // Use SequenceEqual instead of Assert.Equal for perf. Assert.True( Encoding.UTF8.GetBytes("Hello" + new string('x', 20_000_000) + "\U00010000" + new string('x', 20_000_000) + '\ufffd').AsSpan().SequenceEqual(writer.WrittenSpan)); } [Fact] public static void Convert_Encoder_ReadOnlySequence_IBufferWriter_ParamChecks() { Encoder encoder = Encoding.UTF8.GetEncoder(); IBufferWriter<byte> writer = new ArrayBufferWriter<byte>(); Assert.Throws<ArgumentNullException>("encoder", () => EncodingExtensions.Convert((Encoder)null, ReadOnlySequence<char>.Empty, writer, true, out _, out _)); Assert.Throws<ArgumentNullException>("writer", () => EncodingExtensions.Convert(encoder, ReadOnlySequence<char>.Empty, (IBufferWriter<byte>)null, true, out _, out _)); } [Fact] public static void Convert_Encoder_ReadOnlySequence_IBufferWriter() { Encoder encoder = Encoding.UTF8.GetEncoder(); ArrayBufferWriter<byte> writer = new ArrayBufferWriter<byte>(); // First, input with no flushing and no leftover data. ReadOnlySequence<char> inputData = SequenceFactory.Create( new char[] { '\u0020' }, // U+0020 new char[] { '\ud7ff' }); // U+D7FF EncodingExtensions.Convert(encoder, inputData, writer, flush: false, out long bytesUsed, out bool completed); Assert.Equal(4, bytesUsed); Assert.True(completed); // Then, input with no flushing and leftover data. inputData = SequenceFactory.Create( new char[] { '\udbc0' }); // U+100000 (continues on next line) EncodingExtensions.Convert(encoder, inputData, writer, flush: false, out bytesUsed, out completed); Assert.Equal(0, bytesUsed); Assert.False(completed); // Then, input with flushing and leftover data (should be replaced). inputData = SequenceFactory.Create( new char[] { '\udc00' }, // (cont.) new char[] { '\ud800' }); // leftover data (should be replaced) EncodingExtensions.Convert(encoder, inputData, writer, flush: true, out bytesUsed, out completed); Assert.Equal(7, bytesUsed); Assert.True(completed); // Now make sure all of the data was decoded properly. Assert.Equal(Encoding.UTF8.GetBytes("\u0020\ud7ff\U00100000\ufffd"), writer.WrittenSpan.ToArray()); } [Fact] public static void GetBytes_Encoding_ReadOnlySequence_ParamChecks() { ReadOnlySequence<char> sequence = new ReadOnlySequence<char>(new char[0]); Assert.Throws<ArgumentNullException>("encoding", () => EncodingExtensions.GetBytes(null, sequence)); } [Fact] public static void GetBytes_Encoding_ReadOnlySequence() { // First try the single-segment code path. ReadOnlySequence<char> sequence = new ReadOnlySequence<char>("Hello!".ToCharArray()); Assert.Equal(Encoding.UTF8.GetBytes("Hello!"), EncodingExtensions.GetBytes(Encoding.UTF8, sequence)); // Next try the multi-segment code path. // We've intentionally split multi-char subsequences here to test flushing mechanisms. sequence = SequenceFactory.Create( new char[] { '\u0020' }, // U+0020 new char[] { '\u0061', '\u0080' }, // U+0061 and U+0080 (continues on next line) new char[] { '\ud800' }, // U+10000 (continues on next line) new char[] { }, // empty segment, just to make sure we handle it correctly new char[] { '\udc00', '\udbff' }, // (cont.) + U+10FFFF (continues on next line) new char[] { '\udfff' }, // (cont.) 
new char[] { '\ud800' }); // leftover data (should be replaced) Assert.Equal(Encoding.UTF8.GetBytes("\u0020\u0061\u0080\U00010000\U0010FFFF\ufffd"), EncodingExtensions.GetBytes(Encoding.UTF8, sequence)); } [Fact] public static void GetBytes_Encoding_ReadOnlySequence_IBufferWriter_SingleSegment() { ReadOnlySequence<char> sequence = new ReadOnlySequence<char>("Hello".ToCharArray()); ArrayBufferWriter<byte> writer = new ArrayBufferWriter<byte>(); long bytesWritten = EncodingExtensions.GetBytes(Encoding.UTF8, sequence, writer); Assert.Equal(5, bytesWritten); Assert.Equal(Encoding.UTF8.GetBytes("Hello"), writer.WrittenSpan.ToArray()); } [Fact] [OuterLoop] // this test takes ~10 seconds on modern hardware since it operates over GBs of data public static void GetBytes_Encoding_ReadOnlySequence_IBufferWriter_LargeMultiSegment() { ReadOnlySequence<char> sequence = GetLargeRepeatingReadOnlySequence<char>(AllScalarsAsUtf16, 1500); // ~ 3.2bn chars of UTF-16 input RepeatingValidatingBufferWriter<byte> writer = new RepeatingValidatingBufferWriter<byte>(AllScalarsAsUtf8); long expectedBytesWritten = 1500 * (long)AllScalarsAsUtf8.Length; long actualBytesWritten = EncodingExtensions.GetBytes(Encoding.UTF8, sequence, writer); Assert.Equal(expectedBytesWritten, actualBytesWritten); Assert.Equal(expectedBytesWritten, writer.TotalElementsWritten); // our writer will validate as data is written to it } [Fact] public static void GetBytes_Encoding_ReadOnlySequence_IBufferWriter_ParamChecks() { ReadOnlySequence<char> sequence = new ReadOnlySequence<char>(new char[0]); IBufferWriter<byte> writer = new ArrayBufferWriter<byte>(); Assert.Throws<ArgumentNullException>("encoding", () => EncodingExtensions.GetBytes((Encoding)null, sequence, writer)); Assert.Throws<ArgumentNullException>("writer", () => EncodingExtensions.GetBytes(Encoding.UTF8, sequence, (IBufferWriter<byte>)null)); } [Fact] public static void GetBytes_Encoding_ReadOnlySequence_Span_ParamChecks() { ReadOnlySequence<char> sequence = new ReadOnlySequence<char>(new char[0]); Assert.Throws<ArgumentNullException>("encoding", () => EncodingExtensions.GetBytes((Encoding)null, sequence, Span<byte>.Empty)); } [Fact] public static void GetBytes_Encoding_ReadOnlySequence_Span() { Span<byte> destination = stackalloc byte[32]; // First try the single-segment code path. ReadOnlySequence<char> sequence = new ReadOnlySequence<char>("Hello!".ToCharArray()); Assert.Equal( expected: Encoding.UTF8.GetBytes("Hello!"), actual: destination.Slice(0, EncodingExtensions.GetBytes(Encoding.UTF8, sequence, destination)).ToArray()); // Next try the multi-segment code path. // We've intentionally split multi-char subsequences here to test flushing mechanisms. sequence = SequenceFactory.Create( new char[] { '\u0020' }, // U+0020 new char[] { '\u0061', '\u0080' }, // U+0061 and U+0080 (continues on next line) new char[] { '\ud800' }, // U+10000 (continues on next line) new char[] { }, // empty segment, just to make sure we handle it correctly new char[] { '\udc00', '\udbff' }, // (cont.) + U+10FFFF (continues on next line) new char[] { '\udfff' }, // (cont.) 
new char[] { '\ud800' }); // leftover data (should be replaced) Assert.Equal( expected: Encoding.UTF8.GetBytes("\u0020\u0061\u0080\U00010000\U0010FFFF\ufffd"), actual: destination.Slice(0, EncodingExtensions.GetBytes(Encoding.UTF8, sequence, destination)).ToArray()); } [Fact] public static void GetBytes_Encoding_ReadOnlySpan_IBufferWriter_ParamChecks() { IBufferWriter<byte> writer = new ArrayBufferWriter<byte>(); Assert.Throws<ArgumentNullException>("encoding", () => EncodingExtensions.GetBytes((Encoding)null, ReadOnlySpan<char>.Empty, writer)); Assert.Throws<ArgumentNullException>("writer", () => EncodingExtensions.GetBytes(Encoding.UTF8, ReadOnlySpan<char>.Empty, (IBufferWriter<byte>)null)); } [Fact] public static void GetBytes_Encoding_ReadOnlySpan_IBufferWriter() { ArrayBufferWriter<byte> writer = new ArrayBufferWriter<byte>(); // First, a small input that goes through the one-shot code path. ReadOnlySpan<char> inputData = "Hello"; long bytesWritten = EncodingExtensions.GetBytes(Encoding.UTF8, inputData, writer); Assert.Equal(5, bytesWritten); Assert.Equal(Encoding.UTF8.GetBytes("Hello"), writer.WrittenSpan.ToArray()); // Then, a large input that goes through the chunked path. // We alternate between 1-char and 2-char sequences so that the input will be split in // several locations by the internal GetChars chunking logic. This helps us test // that we're flowing the 'flush' parameter through the system correctly. string largeString = string.Create(5_000_000, (object)null, (span, _) => { while (span.Length >= 3) { span[0] = '\u00EA'; // U+00EA LATIN SMALL LETTER E WITH CIRCUMFLEX span[1] = '\uD83D'; // U+1F405 TIGER span[2] = '\uDC05'; span = span.Slice(3); } // There are 2 chars left over. Assert.Equal(2, span.Length); span[0] = 'x'; span[1] = 'y'; }); writer = new ArrayBufferWriter<byte>(); inputData = largeString + '\uD800'; // standalone lead surrogate at end of input, testing replacement bytesWritten = EncodingExtensions.GetBytes(Encoding.UTF8, inputData, writer); Assert.Equal(10_000_001, bytesWritten); // 9,999,998 for data + 3 for replacement char at end // Now make sure all of the data was encoded properly. Assert.True(Encoding.UTF8.GetBytes(largeString + "\ufffd").AsSpan().SequenceEqual(writer.WrittenSpan)); } [Fact] public static void GetString_Encoding_ReadOnlySequence() { // First try the single-segment code path. ReadOnlySequence<byte> sequence = new ReadOnlySequence<byte>(Encoding.UTF8.GetBytes("Hello!")); Assert.Equal("Hello!", EncodingExtensions.GetString(Encoding.UTF8, sequence)); // Next try the multi-segment code path. // We've intentionally split multi-byte subsequences here to test flushing mechanisms. sequence = SequenceFactory.Create( new byte[] { 0x20 }, // U+0020 new byte[] { 0x61, 0xC2 }, // U+0061 and U+0080 (continues on next line) new byte[] { 0x80, 0xED }, // (cont.) + U+D7FF (continues on next line) new byte[] { }, // empty segment, just to make sure we handle it correctly new byte[] { 0x9F, 0xBF, 0xF4, 0x80 }, // (cont.) + U+100000 (continues on next line) new byte[] { 0x80, 0x80 }, // (cont.)
new byte[] { 0xC2 }); // leftover data (should be replaced) Assert.Equal("\u0020\u0061\u0080\ud7ff\U00100000\ufffd", EncodingExtensions.GetString(Encoding.UTF8, sequence)); } [Fact] public static void GetString_Encoding_ReadOnlySequence_ParamChecks() { ReadOnlySequence<byte> sequence = new ReadOnlySequence<byte>(new byte[0]); Assert.Throws<ArgumentNullException>("encoding", () => EncodingExtensions.GetString(null, sequence)); } [Fact] public static void GetChars_Encoding_ReadOnlySequence_IBufferWriter_SingleSegment() { ReadOnlySequence<byte> sequence = new ReadOnlySequence<byte>(Encoding.UTF8.GetBytes("Hello")); ArrayBufferWriter<char> writer = new ArrayBufferWriter<char>(); long charsWritten = EncodingExtensions.GetChars(Encoding.UTF8, sequence, writer); Assert.Equal(5, charsWritten); Assert.Equal("Hello", writer.WrittenSpan.ToString()); } [Fact] [OuterLoop] // this test takes ~10 seconds on modern hardware since it operates over GBs of data public static void GetChars_Encoding_ReadOnlySequence_IBufferWriter_LargeMultiSegment() { ReadOnlySequence<byte> sequence = GetLargeRepeatingReadOnlySequence<byte>(AllScalarsAsUtf8, 1500); // ~ 6.5bn bytes of UTF-8 input RepeatingValidatingBufferWriter<char> writer = new RepeatingValidatingBufferWriter<char>(AllScalarsAsUtf16); long expectedCharsWritten = 1500 * (long)AllScalarsAsUtf16.Length; long actualCharsWritten = EncodingExtensions.GetChars(Encoding.UTF8, sequence, writer); Assert.Equal(expectedCharsWritten, actualCharsWritten); Assert.Equal(expectedCharsWritten, writer.TotalElementsWritten); // our writer will validate as data is written to it } [Fact] public static void GetChars_Encoding_ReadOnlySequence_IBufferWriter_ParamChecks() { ReadOnlySequence<byte> sequence = new ReadOnlySequence<byte>(new byte[0]); IBufferWriter<char> writer = new ArrayBufferWriter<char>(); Assert.Throws<ArgumentNullException>("encoding", () => EncodingExtensions.GetChars((Encoding)null, sequence, writer)); Assert.Throws<ArgumentNullException>("writer", () => EncodingExtensions.GetChars(Encoding.UTF8, sequence, (IBufferWriter<char>)null)); } [Fact] public static void GetChars_Encoding_ReadOnlySequence_Span() { Span<char> destination = stackalloc char[32]; // First try the single-segment code path. ReadOnlySequence<byte> sequence = new ReadOnlySequence<byte>(Encoding.UTF8.GetBytes("Hello!")); Assert.Equal("Hello!", destination.Slice(0, EncodingExtensions.GetChars(Encoding.UTF8, sequence, destination)).ToString()); // Next try the multi-segment code path. // We've intentionally split multi-byte subsequences here to test flushing mechanisms. sequence = SequenceFactory.Create( new byte[] { 0x20 }, // U+0020 new byte[] { 0x61, 0xC2 }, // U+0061 and U+0080 (continues on next line) new byte[] { 0x80, 0xED }, // (cont.) + U+D7FF (continues on next line) new byte[] { }, // empty segment, just to make sure we handle it correctly new byte[] { 0x9F, 0xBF, 0xF4, 0x80 }, // (cont.) + U+100000 (continues on next line) new byte[] { 0x80, 0x80 }, // (cont.) 
new byte[] { 0xC2 }); // leftover data (should be replaced) Assert.Equal("\u0020\u0061\u0080\ud7ff\U00100000\ufffd", destination.Slice(0, EncodingExtensions.GetChars(Encoding.UTF8, sequence, destination)).ToString()); } [Fact] public static void GetChars_Encoding_ReadOnlySequence_Span_ParamChecks() { ReadOnlySequence<byte> sequence = new ReadOnlySequence<byte>(new byte[0]); Assert.Throws<ArgumentNullException>("encoding", () => EncodingExtensions.GetChars((Encoding)null, sequence, Span<char>.Empty)); } [Fact] public static void GetChars_Encoding_ReadOnlySpan_IBufferWriter_ParamChecks() { IBufferWriter<char> writer = new ArrayBufferWriter<char>(); Assert.Throws<ArgumentNullException>("encoding", () => EncodingExtensions.GetChars((Encoding)null, ReadOnlySpan<byte>.Empty, writer)); Assert.Throws<ArgumentNullException>("writer", () => EncodingExtensions.GetChars(Encoding.UTF8, ReadOnlySpan<byte>.Empty, (IBufferWriter<char>)null)); } [Fact] public static void GetChars_Encoding_ReadOnlySpan_IBufferWriter() { ArrayBufferWriter<char> writer = new ArrayBufferWriter<char>(); // First, a small input that goes through the one-shot code path. ReadOnlySpan<byte> inputData = Encoding.UTF8.GetBytes("Hello"); long charsWritten = EncodingExtensions.GetChars(Encoding.UTF8, inputData, writer); Assert.Equal(5, charsWritten); Assert.Equal("Hello", writer.WrittenSpan.ToString()); // Then, a large input that goes through the chunked path. // We use U+1234 because it's a 3-byte UTF-8 sequence, which means it'll be split in // several locations by the internal GetBytes chunking logic. This helps us test // that we're flowing the 'flush' parameter through the system correctly. writer = new ArrayBufferWriter<char>(); inputData = Encoding.UTF8.GetBytes(new string('\u1234', 5_000_000)).Concat(new byte[] { 0xE0 }).ToArray(); charsWritten = EncodingExtensions.GetChars(Encoding.UTF8, inputData, writer); Assert.Equal(5_000_001, charsWritten); // 5 MM for data, 1 for replacement char at end // Now make sure all of the data was decoded properly. Assert.Equal( expected: new string('\u1234', 5_000_000) + '\ufffd', actual: writer.WrittenSpan.ToString()); } /// <summary> /// Returns a <see cref="ReadOnlySequence{T}"/> consisting of <paramref name="dataToRepeat"/> repeated <paramref name="repetitionCount"/> times. /// This can be used to produce a sequence consisting of billions of elements while consuming a fraction of that memory. 
/// </summary> /// <returns></returns> private static ReadOnlySequence<T> GetLargeRepeatingReadOnlySequence<T>(ReadOnlyMemory<T> dataToRepeat, int repetitionCount) { const int MAX_SEGMENT_LENGTH = 300_007; // a prime number, which ensures we'll have some multi-byte / multi-char splits if the data is long MockSequenceSegment<T> firstSegment = null; MockSequenceSegment<T> previousSegment = null; MockSequenceSegment<T> lastSegment = null; long runningTotalLength = 0; for (int i = 0; i < repetitionCount; i++) { ReadOnlyMemory<T> remainingData = dataToRepeat; while (!remainingData.IsEmpty) { int thisSegmentLength = Math.Min(remainingData.Length, MAX_SEGMENT_LENGTH); lastSegment = new MockSequenceSegment<T> { Memory = remainingData.Slice(0, thisSegmentLength), RunningIndex = runningTotalLength }; if (previousSegment != null) { previousSegment.Next = lastSegment; } previousSegment = lastSegment; if (firstSegment == null) { firstSegment = lastSegment; } remainingData = remainingData.Slice(thisSegmentLength); runningTotalLength += thisSegmentLength; } } return new ReadOnlySequence<T>(firstSegment, 0, lastSegment, lastSegment.Memory.Length); } /// <summary> /// An <see cref="IBufferWriter{T}"/> that validates that the data written to it consists of 'knownGoodData' repeated indefinitely. /// </summary> private class RepeatingValidatingBufferWriter<T> : IBufferWriter<T> where T : unmanaged, IEquatable<T> { private T[] _buffer; private readonly ReadOnlyMemory<T> _knownGoodData; public long TotalElementsWritten { get; private set; } public RepeatingValidatingBufferWriter(ReadOnlyMemory<T> knownGoodData) { Assert.False(knownGoodData.IsEmpty); _knownGoodData = knownGoodData; } public void Advance(int count) { ReadOnlySpan<T> bufferSpan = _buffer.AsSpan(0, count); ReadOnlySpan<T> remainingGoodDataSpan = _knownGoodData.Span.Slice((int)(TotalElementsWritten % _knownGoodData.Length)); while (!bufferSpan.IsEmpty) { int compareLength = Math.Min(bufferSpan.Length, remainingGoodDataSpan.Length); Assert.True(remainingGoodDataSpan.Slice(0, compareLength).SequenceEqual(bufferSpan.Slice(0, compareLength))); remainingGoodDataSpan = remainingGoodDataSpan.Slice(compareLength); if (remainingGoodDataSpan.IsEmpty) { remainingGoodDataSpan = _knownGoodData.Span; } bufferSpan = bufferSpan.Slice(compareLength); } TotalElementsWritten += count; } public Memory<T> GetMemory(int sizeHint) => throw new NotImplementedException(); public Span<T> GetSpan(int sizeHint) { if (_buffer is null || sizeHint > _buffer.Length) { _buffer = new T[Math.Max(sizeHint, 128)]; } return _buffer; } } /// <summary> /// A <see cref="ReadOnlySequenceSegment{T}"/> where all members are public. /// </summary> private sealed class MockSequenceSegment<T> : ReadOnlySequenceSegment<T> { public new ReadOnlyMemory<T> Memory { get => base.Memory; set => base.Memory = value; } public new ReadOnlySequenceSegment<T> Next { get => base.Next; set => base.Next = value; } public new long RunningIndex { get => base.RunningIndex; set => base.RunningIndex = value; } } } }
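// ----------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the test sources above). The tests above
// drive EncodingExtensions.Convert with a stateful Decoder and an IBufferWriter<char>;
// this shows the same pattern in plain form: feed byte chunks that split a multi-byte
// UTF-8 sequence, flush on the last chunk, and read the result from the writer.
// ----------------------------------------------------------------------------------
using System;
using System.Buffers;
using System.Text;

internal static class StreamingDecodeSketch
{
    internal static void Run()
    {
        byte[][] chunks =
        {
            new byte[] { 0x48, 0x69, 0x20, 0xE2 }, // "Hi " plus the first byte of U+20AC (EURO SIGN)
            new byte[] { 0x82, 0xAC },             // remaining two bytes of U+20AC
        };

        Decoder decoder = Encoding.UTF8.GetDecoder();            // carries partial-sequence state between calls
        ArrayBufferWriter<char> writer = new ArrayBufferWriter<char>();

        for (int i = 0; i < chunks.Length; i++)
        {
            bool isLastChunk = (i == chunks.Length - 1);
            EncodingExtensions.Convert(decoder, chunks[i], writer, flush: isLastChunk,
                out long charsUsed, out bool completed);
            Console.WriteLine($"chunk {i}: charsUsed={charsUsed}, completed={completed}");
        }

        Console.WriteLine(writer.WrittenSpan.ToString());        // "Hi €"
    }
}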
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. // using System; public struct ValX0 { } public struct ValY0 { } public struct ValX1<T> { } public struct ValY1<T> { } public struct ValX2<T, U> { } public struct ValY2<T, U> { } public struct ValX3<T, U, V> { } public struct ValY3<T, U, V> { } public class RefX0 { } public class RefY0 { } public class RefX1<T> { } public class RefY1<T> { } public class RefX2<T, U> { } public class RefY2<T, U> { } public class RefX3<T, U, V> { } public class RefY3<T, U, V> { } public interface IGen<T> { void _Init(T fld1); bool InstVerify(System.Type t1); } public interface IGenSub<T> : IGen<T> { } public class GenInt : IGenSub<int> { int Fld1; public void _Init(int fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<int>)); } return result; } } public class GenDouble : IGenSub<double> { double Fld1; public void _Init(double fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<double>)); } return result; } } public class GenString : IGenSub<String> { string Fld1; public void _Init(string fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<string>)); } return result; } } public class GenObject : IGenSub<object> { object Fld1; public void _Init(object fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<object>)); } return result; } } public class GenGuid : IGenSub<Guid> { Guid Fld1; public void _Init(Guid fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<Guid>)); } return result; } } public class GenConstructedReference : IGenSub<RefX1<int>> { RefX1<int> Fld1; public void _Init(RefX1<int> fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<RefX1<int>>)); } return result; } } public class GenConstructedValue : IGenSub<ValX1<string>> { ValX1<string> Fld1; public void _Init(ValX1<string> fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<ValX1<string>>)); } return result; } } public class Gen1DIntArray : IGenSub<int[]> { int[] Fld1; public void _Init(int[] fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<int[]>)); } return result; } } public class Gen2DStringArray : IGenSub<string[,]> { string[,] Fld1; public void _Init(string[,] fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = 
false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<string[,]>)); } return result; } } public class GenJaggedObjectArray : IGenSub<object[][]> { object[][] Fld1; public void _Init(object[][] fld1) { Fld1 = fld1; } public bool InstVerify(System.Type t1) { bool result = true; if (!(Fld1.GetType().Equals(t1))) { result = false; Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(IGen<object[][]>)); } return result; } } public class Test { public static int counter = 0; public static bool result = true; public static void Eval(bool exp) { counter++; if (!exp) { result = exp; Console.WriteLine("Test Failed at location: " + counter); } } public static int Main() { IGen<int> IGenInt = new GenInt(); IGenInt._Init(new int()); Eval(IGenInt.InstVerify(typeof(int))); IGen<double> IGenDouble = new GenDouble(); IGenDouble._Init(new double()); Eval(IGenDouble.InstVerify(typeof(double))); IGen<string> IGenString = new GenString(); IGenString._Init("string"); Eval(IGenString.InstVerify(typeof(string))); IGen<object> IGenObject = new GenObject(); IGenObject._Init(new object()); Eval(IGenObject.InstVerify(typeof(object))); IGen<Guid> IGenGuid = new GenGuid(); IGenGuid._Init(new Guid()); Eval(IGenGuid.InstVerify(typeof(Guid))); IGen<RefX1<int>> IGenConstructedReference = new GenConstructedReference(); IGenConstructedReference._Init(new RefX1<int>()); Eval(IGenConstructedReference.InstVerify(typeof(RefX1<int>))); IGen<ValX1<string>> IGenConstructedValue = new GenConstructedValue(); IGenConstructedValue._Init(new ValX1<string>()); Eval(IGenConstructedValue.InstVerify(typeof(ValX1<string>))); IGen<int[]> IGen1DIntArray = new Gen1DIntArray(); IGen1DIntArray._Init(new int[1]); Eval(IGen1DIntArray.InstVerify(typeof(int[]))); IGen<string[,]> IGen2DStringArray = new Gen2DStringArray(); IGen2DStringArray._Init(new string[1, 1]); Eval(IGen2DStringArray.InstVerify(typeof(string[,]))); IGen<object[][]> IGenJaggedObjectArray = new GenJaggedObjectArray(); IGenJaggedObjectArray._Init(new object[1][]); Eval(IGenJaggedObjectArray.InstVerify(typeof(object[][]))); if (result) { Console.WriteLine("Test Passed"); return 100; } else { Console.WriteLine("Test Failed"); return 1; } } }
using NUnit.Framework; using static NSelene.Selene; namespace NSelene.Tests.Integration.SharedDriver.SeleneSpec { using System; using System.Linq; using System.Reflection; using Harness; using OpenQA.Selenium; [TestFixture] public class SeleneElement_Type_Specs : BaseTest { // TODO: should we cover cases when Type applied to field with cursor in the middle? // TODO: move here some TypeByJs tests [Test] public void Type_WaitsForVisibility_OfInitiialyAbsent() { Configuration.Timeout = 0.7; // bigger than for other actions, because we simulate typing all keys... Configuration.PollDuringWaits = 0.1; Given.OpenedEmptyPage(); var beforeCall = DateTime.Now; Given.OpenedPageWithBodyTimedOut( @" <input value='before '></input> ", 300 ); S("input").Type("and after"); var afterCall = DateTime.Now; Assert.AreEqual( "before and after", Configuration.Driver .FindElement(By.TagName("input")).GetAttribute("value") ); Assert.AreEqual( "before and after", Configuration.Driver .FindElement(By.TagName("input")).GetProperty("value") ); Assert.Greater(afterCall, beforeCall.AddSeconds(0.3)); Assert.Less(afterCall, beforeCall.AddSeconds(0.7)); } [Test] public void Type_IsRenderedInError_OnAbsentElementFailure() { Configuration.Timeout = 0.25; Configuration.PollDuringWaits = 0.1; Given.OpenedEmptyPage(); try { S("input").Type("and after"); } catch (TimeoutException error) { // TODO: shoud we check timing here too? var lines = error.Message.Split("\n").Select( item => item.Trim() ).ToList(); Assert.Contains("Timed out after 0.25s, while waiting for:", lines); Assert.Contains("Browser.Element(input).ActualNotOverlappedWebElement.SendKeys(and after)", lines); Assert.Contains("Reason:", lines); Assert.Contains( "no such element: Unable to locate element: " + "{\"method\":\"css selector\",\"selector\":\"input\"}" , lines ); } } [Test] public void Type_FailsOnHiddenInputOfTypeFile() { Configuration.Timeout = 0.25; Configuration.PollDuringWaits = 0.1; Given.OpenedPageWithBody( @" <input type='file' style='display:none'></input> " ); var path = new Uri( new Uri(Assembly.GetExecutingAssembly().Location), "../../../Resources/empty.html" // TODO: use ./empty.html (tune csproj correspondingly) ).AbsolutePath; try { S("[type=file]").Type(path); } catch (TimeoutException error) { var lines = error.Message.Split("\n").Select( item => item.Trim() ).ToList(); Assert.Contains("Timed out after 0.25s, while waiting for:", lines); Assert.Contains($"Browser.Element([type=file]).ActualNotOverlappedWebElement.SendKeys({path})", lines); Assert.Contains("Reason:", lines); Assert.Contains("javascript error: element is not visible", lines); Assert.AreEqual( "", Configuration.Driver .FindElement(By.TagName("input")).GetAttribute("value") ); Assert.AreEqual( "", Configuration.Driver .FindElement(By.TagName("input")).GetProperty("value") ); } } [Test] public void Type_WaitsForVisibility_OfInitialyHidden() { Configuration.Timeout = 0.7; Configuration.PollDuringWaits = 0.1; Given.OpenedPageWithBody( @" <input value='before ' style='display:none'></input> " ); var beforeCall = DateTime.Now; Given.ExecuteScriptWithTimeout( @" document.getElementsByTagName('input')[0].style.display = 'block'; ", 300 ); S("input").Type("and after"); var afterCall = DateTime.Now; Assert.AreEqual( "before and after", Configuration.Driver .FindElement(By.TagName("input")).GetAttribute("value") ); Assert.AreEqual( "before and after", Configuration.Driver .FindElement(By.TagName("input")).GetProperty("value") ); Assert.Greater(afterCall, beforeCall.AddSeconds(0.3)); 
Assert.Less(afterCall, beforeCall.AddSeconds(0.7)); } [Test] public void Type_IsRenderedInError_OnHiddenElementFailure() { Configuration.Timeout = 0.25; Configuration.PollDuringWaits = 0.1; Given.OpenedPageWithBody( @" <input value='before ' style='display:none'></input> " ); try { S("input").Type("and after"); } catch (TimeoutException error) { var lines = error.Message.Split("\n").Select( item => item.Trim() ).ToList(); Assert.Contains("Timed out after 0.25s, while waiting for:", lines); Assert.Contains("Browser.Element(input).ActualNotOverlappedWebElement.SendKeys(and after)", lines); Assert.Contains("Reason:", lines); Assert.Contains("element not interactable", lines); Assert.AreEqual( "before ", Configuration.Driver .FindElement(By.TagName("input")).GetAttribute("value") ); Assert.AreEqual( "before ", Configuration.Driver .FindElement(By.TagName("input")).GetProperty("value") ); } } [Test] public void Type_IsRenderedInError_OnHiddenElementFailure_WhenCustomizedToWaitForNoOverlapFoundByJs() { Configuration.Timeout = 0.25; Configuration.PollDuringWaits = 0.1; Given.OpenedPageWithBody( @" <input value='before ' style='display:none'></input> " ); try { S("input").With(waitForNoOverlapFoundByJs: true).Type("and after"); } catch (TimeoutException error) { var lines = error.Message.Split("\n").Select( item => item.Trim() ).ToList(); Assert.Contains("Timed out after 0.25s, while waiting for:", lines); Assert.Contains("Browser.Element(input).ActualNotOverlappedWebElement.SendKeys(and after)", lines); Assert.Contains("Reason:", lines); Assert.Contains("javascript error: element is not visible", lines); Assert.AreEqual( "before ", Configuration.Driver .FindElement(By.TagName("input")).GetAttribute("value") ); Assert.AreEqual( "before ", Configuration.Driver .FindElement(By.TagName("input")).GetProperty("value") ); } } [Test] public void Type_WorksUnderOverlay_ByDefault() { Configuration.Timeout = 1.0; Configuration.PollDuringWaits = 0.1; Given.OpenedPageWithBody( @" <div id='overlay' style=' display:block; position: fixed; display: block; width: 100%; height: 100%; top: 0; left: 0; right: 0; bottom: 0; background-color: rgba(0,0,0,0.1); z-index: 2; cursor: pointer; ' > </div> <input value='before '></input> " ); var beforeCall = DateTime.Now; S("input").Type("and after"); var afterCall = DateTime.Now; Assert.Less(afterCall, beforeCall.AddSeconds(0.5)); Assert.AreEqual( "before and after", Configuration.Driver .FindElement(By.TagName("input")).GetAttribute("value") ); Assert.AreEqual( "before and after", Configuration.Driver .FindElement(By.TagName("input")).GetProperty("value") ); } [Test] public void Type_WaitsForNoOverlay_IfExplicitelyCustomized() { Configuration.Timeout = 1.0; Configuration.PollDuringWaits = 0.1; Given.OpenedPageWithBody( @" <div id='overlay' style=' display:block; position: fixed; display: block; width: 100%; height: 100%; top: 0; left: 0; right: 0; bottom: 0; background-color: rgba(0,0,0,0.1); z-index: 2; cursor: pointer; ' > </div> <input value='before '></input> " ); var beforeCall = DateTime.Now; Given.ExecuteScriptWithTimeout( @" document.getElementById('overlay').style.display = 'none'; ", 300 ); S("input").With(waitForNoOverlapFoundByJs: true).Type("and after"); var afterCall = DateTime.Now; Assert.Greater(afterCall, beforeCall.AddSeconds(0.3)); Assert.Less(afterCall, beforeCall.AddSeconds(1.0)); Assert.AreEqual( "before and after", Configuration.Driver .FindElement(By.TagName("input")).GetAttribute("value") ); Assert.AreEqual( "before and after", Configuration.Driver 
.FindElement(By.TagName("input")).GetProperty("value") ); } [Test] public void Type_IsRenderedInError_OnOverlappedWithOverlayFailure() { Configuration.Timeout = 0.25; Configuration.PollDuringWaits = 0.1; Given.OpenedPageWithBody( @" <div id='overlay' style=' display: block; position: fixed; display: block; width: 100%; height: 100%; top: 0; left: 0; right: 0; bottom: 0; background-color: rgba(0,0,0,0.1); z-index: 2; cursor: pointer; ' > </div> <input value='before '></input> " ); try { S("input").With(waitForNoOverlapFoundByJs: true).Type("and after"); } catch (TimeoutException error) { var lines = error.Message.Split("\n").Select( item => item.Trim() ).ToList(); Assert.Contains("Timed out after 0.25s, while waiting for:", lines); Assert.Contains("Browser.Element(input).ActualNotOverlappedWebElement.SendKeys(and after)", lines); Assert.Contains("Reason:", lines); Assert.Contains("Element: <input value=\"before \">", lines); Assert.NotNull(lines.Find(item => item.Contains( "is overlapped by: <div id=\"overlay\" " ))); } } } }
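// ----------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the test sources above). Outside the test
// harness, the behaviour covered above comes down to two call shapes: the default
// Type, which waits for visibility only, and an element customized with
// waitForNoOverlapFoundByJs, which additionally waits until nothing overlaps it.
// WebDriver setup is omitted (Configuration.Driver is assumed to be configured), and
// the CSS selector is just an example.
// ----------------------------------------------------------------------------------
using NSelene;
using static NSelene.Selene;

internal static class TypeUsageSketch
{
    internal static void Run()
    {
        Configuration.Timeout = 4.0;          // seconds to keep polling before a TimeoutException
        Configuration.PollDuringWaits = 0.1;

        // Waits for the input to become visible, then appends the keys to its current value.
        S("input[name=q]").Type("selene");

        // Same action, but also waits until the element is not covered by an overlay.
        S("input[name=q]").With(waitForNoOverlapFoundByJs: true).Type(" rocks");
    }
}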
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Linq; using System.Runtime.InteropServices; using Xunit; namespace System.IO.Tests { public class Directory_Exists : FileSystemTest { #region Utilities public bool Exists(string path) { return Directory.Exists(path); } #endregion #region UniversalTests [Fact] public void NullAsPath_ReturnsFalse() { Assert.False(Exists(null)); } [Fact] public void EmptyAsPath_ReturnsFalse() { Assert.False(Exists(string.Empty)); } [Fact] public void NonExistentValidPath_ReturnsFalse() { Assert.All((IOInputs.GetValidPathComponentNames()), (path) => { Assert.False(Exists(path), path); }); } [Fact] public void ValidPathExists_ReturnsTrue() { Assert.All((IOInputs.GetValidPathComponentNames()), (component) => { string path = Path.Combine(TestDirectory, component); DirectoryInfo testDir = Directory.CreateDirectory(path); Assert.True(Exists(path)); }); } [Theory, MemberData(nameof(PathsWithInvalidCharacters))] public void PathWithInvalidCharactersAsPath_ReturnsFalse(string invalidPath) { // Checks that errors aren't thrown when calling Exists() on paths with impossible to create characters char[] trimmed = { (char)0x9, (char)0xA, (char)0xB, (char)0xC, (char)0xD, (char)0x20, (char)0x85, (char)0xA0 }; Assert.False(Exists(invalidPath)); if (!trimmed.Contains(invalidPath.ToCharArray()[0])) Assert.False(Exists(TestDirectory + Path.DirectorySeparatorChar + invalidPath)); } [Fact] public void PathAlreadyExistsAsFile() { string path = GetTestFilePath(); File.Create(path).Dispose(); Assert.False(Exists(IOServices.RemoveTrailingSlash(path))); Assert.False(Exists(IOServices.RemoveTrailingSlash(IOServices.RemoveTrailingSlash(path)))); Assert.False(Exists(IOServices.RemoveTrailingSlash(IOServices.AddTrailingSlashIfNeeded(path)))); } [Fact] public void PathAlreadyExistsAsDirectory() { string path = GetTestFilePath(); DirectoryInfo testDir = Directory.CreateDirectory(path); Assert.True(Exists(IOServices.RemoveTrailingSlash(path))); Assert.True(Exists(IOServices.RemoveTrailingSlash(IOServices.RemoveTrailingSlash(path)))); Assert.True(Exists(IOServices.RemoveTrailingSlash(IOServices.AddTrailingSlashIfNeeded(path)))); } [Fact] public void DotAsPath_ReturnsTrue() { Assert.True(Exists(Path.Combine(TestDirectory, "."))); } [Fact] public void DirectoryGetCurrentDirectoryAsPath_ReturnsTrue() { Assert.True(Exists(Directory.GetCurrentDirectory())); } [Fact] public void DotDotAsPath_ReturnsTrue() { Assert.True(Exists(Path.Combine(TestDirectory, GetTestFileName(), ".."))); } [Fact] public void DirectoryLongerThanMaxLongPath_DoesntThrow() { Assert.All((IOInputs.GetPathsLongerThanMaxLongPath(GetTestFilePath())), (path) => { Assert.False(Exists(path), path); }); } [ConditionalFact(nameof(CanCreateSymbolicLinks))] public void SymLinksMayExistIndependentlyOfTarget() { var path = GetTestFilePath(); var linkPath = GetTestFilePath(); Directory.CreateDirectory(path); Assert.True(MountHelper.CreateSymbolicLink(linkPath, path, isDirectory: true)); // Both the symlink and the target exist Assert.True(Directory.Exists(path), "path should exist"); Assert.True(Directory.Exists(linkPath), "linkPath should exist"); Assert.False(File.Exists(linkPath)); // Delete the target. The symlink should still exist. On Unix, the symlink will now be // considered a file (since it's broken and we don't know what it'll eventually point to). 
Directory.Delete(path); Assert.False(Directory.Exists(path), "path should now not exist"); if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { Assert.True(Directory.Exists(linkPath), "linkPath should still exist as a directory"); Assert.False(File.Exists(linkPath), "linkPath should not be a file"); } else { Assert.False(Directory.Exists(linkPath), "linkPath should no longer be a directory"); Assert.True(File.Exists(linkPath), "linkPath should now be a file"); } // Now delete the symlink. // On Unix, deleting the symlink should fail, because it's not a directory, it's a file. // On Windows, it should succeed. try { Directory.Delete(linkPath); Assert.True(RuntimeInformation.IsOSPlatform(OSPlatform.Windows), "Should only succeed on Windows"); } catch (IOException) { Assert.False(RuntimeInformation.IsOSPlatform(OSPlatform.Windows), "Should only fail on Unix"); File.Delete(linkPath); } Assert.False(Directory.Exists(linkPath), "linkPath should no longer exist as a directory"); Assert.False(File.Exists(linkPath), "linkPath should no longer exist as a file"); } [ConditionalFact(nameof(CanCreateSymbolicLinks))] public void SymlinkToNewDirectory() { string path = GetTestFilePath(); Directory.CreateDirectory(path); string linkPath = GetTestFilePath(); Assert.True(MountHelper.CreateSymbolicLink(linkPath, path, isDirectory: true)); Assert.True(Directory.Exists(path)); Assert.True(Directory.Exists(linkPath)); } #endregion #region PlatformSpecific [ConditionalFact(nameof(UsingNewNormalization))] [ActiveIssue(20117, TargetFrameworkMonikers.Uap)] [PlatformSpecific(TestPlatforms.Windows)] // Extended path exists public void ValidExtendedPathExists_ReturnsTrue() { Assert.All((IOInputs.GetValidPathComponentNames()), (component) => { string path = IOInputs.ExtendedPrefix + Path.Combine(TestDirectory, "extended", component); DirectoryInfo testDir = Directory.CreateDirectory(path); Assert.True(Exists(path)); }); } [ConditionalFact(nameof(UsingNewNormalization))] [ActiveIssue(20117, TargetFrameworkMonikers.Uap)] [PlatformSpecific(TestPlatforms.Windows)] // Extended path already exists as file public void ExtendedPathAlreadyExistsAsFile() { string path = IOInputs.ExtendedPrefix + GetTestFilePath(); File.Create(path).Dispose(); Assert.False(Exists(IOServices.RemoveTrailingSlash(path))); Assert.False(Exists(IOServices.RemoveTrailingSlash(IOServices.RemoveTrailingSlash(path)))); Assert.False(Exists(IOServices.RemoveTrailingSlash(IOServices.AddTrailingSlashIfNeeded(path)))); } [ConditionalFact(nameof(UsingNewNormalization))] [ActiveIssue(20117, TargetFrameworkMonikers.Uap)] [PlatformSpecific(TestPlatforms.Windows)] // Extended path already exists as directory public void ExtendedPathAlreadyExistsAsDirectory() { string path = IOInputs.ExtendedPrefix + GetTestFilePath(); DirectoryInfo testDir = Directory.CreateDirectory(path); Assert.True(Exists(IOServices.RemoveTrailingSlash(path))); Assert.True(Exists(IOServices.RemoveTrailingSlash(IOServices.RemoveTrailingSlash(path)))); Assert.True(Exists(IOServices.RemoveTrailingSlash(IOServices.AddTrailingSlashIfNeeded(path)))); } [ConditionalFact(nameof(AreAllLongPathsAvailable))] [ActiveIssue(20117, TargetFrameworkMonikers.Uap)] [PlatformSpecific(TestPlatforms.Windows)] // Long directory path doesn't throw on Exists public void DirectoryLongerThanMaxDirectoryAsPath_DoesntThrow() { Assert.All((IOInputs.GetPathsLongerThanMaxDirectory(GetTestFilePath())), (path) => { Assert.False(Exists(path)); }); } [Fact] [PlatformSpecific(TestPlatforms.Windows)] // Unix equivalent tested 
already in CreateDirectory public void WindowsWhiteSpaceAsPath_ReturnsFalse() { // Checks that errors aren't thrown when calling Exists() on impossible paths Assert.All(IOInputs.GetWhiteSpace(), (component) => { Assert.False(Exists(component)); }); } [Fact] [PlatformSpecific(CaseInsensitivePlatforms)] public void DoesCaseInsensitiveInvariantComparisons() { DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath()); Assert.True(Exists(testDir.FullName)); Assert.True(Exists(testDir.FullName.ToUpperInvariant())); Assert.True(Exists(testDir.FullName.ToLowerInvariant())); } [Fact] [PlatformSpecific(CaseSensitivePlatforms)] public void DoesCaseSensitiveComparisons() { DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath()); Assert.True(Exists(testDir.FullName)); Assert.False(Exists(testDir.FullName.ToUpperInvariant())); Assert.False(Exists(testDir.FullName.ToLowerInvariant())); } [ConditionalFact(nameof(UsingNewNormalization))] [ActiveIssue(20117, TargetFrameworkMonikers.Uap)] [PlatformSpecific(TestPlatforms.Windows)] // In Windows, trailing whitespace in a path is trimmed appropriately public void TrailingWhitespaceExistence() { // This test relies on \\?\ support DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath()); Assert.All(IOInputs.GetWhiteSpace(), (component) => { string path = testDir.FullName + component; Assert.True(Exists(path), path); // string concat in case Path.Combine() trims whitespace before Exists gets to it Assert.False(Exists(IOInputs.ExtendedPrefix + path), path); }); Assert.All(IOInputs.GetSimpleWhiteSpace(), (component) => { string path = GetTestFilePath(memberName: "Extended") + component; testDir = Directory.CreateDirectory(IOInputs.ExtendedPrefix + path); Assert.False(Exists(path), path); Assert.True(Exists(testDir.FullName)); }); } [Fact] [PlatformSpecific(TestPlatforms.Windows)] // alternate data stream public void PathWithAlternateDataStreams_ReturnsFalse() { Assert.All(IOInputs.GetWhiteSpace(), (component) => { Assert.False(Exists(component)); }); } [Fact] [OuterLoop] [PlatformSpecific(TestPlatforms.Windows)] // device names public void PathWithReservedDeviceNameAsPath_ReturnsFalse() { Assert.All((IOInputs.GetPathsWithReservedDeviceNames()), (component) => { Assert.False(Exists(component)); }); } [Fact] public void UncPathWithoutShareNameAsPath_ReturnsFalse() { Assert.All((IOInputs.GetUncPathsWithoutShareName()), (component) => { Assert.False(Exists(component)); }); } [Fact] [PlatformSpecific(TestPlatforms.Windows)] // max directory length not fixed on Unix public void DirectoryEqualToMaxDirectory_ReturnsTrue() { // Creates directories up to the maximum directory length all at once DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath()); PathInfo path = IOServices.GetPath(testDir.FullName, IOInputs.MaxDirectory, maxComponent: 10); Directory.CreateDirectory(path.FullPath); Assert.True(Exists(path.FullPath)); } [Fact] [PlatformSpecific(TestPlatforms.Windows)] // max directory length not fixed on Unix public void DirectoryWithComponentLongerThanMaxComponentAsPath_ReturnsFalse() { Assert.All((IOInputs.GetPathsWithComponentLongerThanMaxComponent()), (component) => { Assert.False(Exists(component)); }); } [Fact] [ActiveIssue(1221)] [PlatformSpecific(TestPlatforms.Windows)] // drive labels public void NotReadyDriveAsPath_ReturnsFalse() { var drive = IOServices.GetNotReadyDrive(); if (drive == null) { Console.WriteLine("Skipping test. 
Unable to find a not-ready drive, such as CD-Rom with no disc inserted."); return; } bool result = Exists(drive); Assert.False(result); } [Fact] [ActiveIssue(1221)] [PlatformSpecific(TestPlatforms.Windows)] // drive labels public void SubdirectoryOnNotReadyDriveAsPath_ReturnsFalse() { var drive = IOServices.GetNotReadyDrive(); if (drive == null) { Console.WriteLine("Skipping test. Unable to find a not-ready drive, such as CD-Rom with no disc inserted."); return; } bool result = Exists(Path.Combine(drive, "Subdirectory")); Assert.False(result); } [Fact] [PlatformSpecific(TestPlatforms.Windows)] // drive labels public void NonExistentDriveAsPath_ReturnsFalse() { Assert.False(Exists(IOServices.GetNonExistentDrive())); } [Fact] [PlatformSpecific(TestPlatforms.Windows)] // drive labels public void SubdirectoryOnNonExistentDriveAsPath_ReturnsFalse() { Assert.False(Exists(Path.Combine(IOServices.GetNonExistentDrive(), "nonexistentsubdir"))); } [Fact] [PlatformSpecific(TestPlatforms.AnyUnix)] // Makes call to native code (libc) public void FalseForNonRegularFile() { string fileName = GetTestFilePath(); Assert.Equal(0, mkfifo(fileName, 0)); Assert.False(Directory.Exists(fileName)); } #endregion } }
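// A minimal, self-contained sketch (not part of the test class above) of the behaviour the
// Exists tests rely on: Directory.Exists is true only for directories, File.Exists is true
// only for files, and invalid or blank paths simply return false instead of throwing.
// The temp-path layout below is an assumption made purely for this example.
using System;
using System.IO;

internal static class ExistsSketch
{
    private static void Main()
    {
        string root = Path.Combine(Path.GetTempPath(), "exists-sketch-" + Guid.NewGuid().ToString("N"));
        string dir = Path.Combine(root, "dir");
        string file = Path.Combine(root, "file.txt");

        Directory.CreateDirectory(dir);            // creates root and dir
        File.WriteAllText(file, "hello");          // creates a regular file

        Console.WriteLine(Directory.Exists(dir));  // True  - it is a directory
        Console.WriteLine(Directory.Exists(file)); // False - a file is not a directory
        Console.WriteLine(File.Exists(dir));       // False - a directory is not a file
        Console.WriteLine(Directory.Exists("  ")); // False - bad input returns false, no exception

        Directory.Delete(root, recursive: true);   // clean up
    }
}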
using System; using System.Collections.Specialized; using System.Runtime; namespace System.Management { public class SelectQuery : WqlObjectQuery { private bool isSchemaQuery; private string className; private string condition; private StringCollection selectedProperties; public string ClassName { get { if (this.className != null) { return this.className; } else { return string.Empty; } } set { this.className = value; this.BuildQuery(); base.FireIdentifierChanged(); } } public string Condition { get { if (this.condition != null) { return this.condition; } else { return string.Empty; } } set { this.condition = value; this.BuildQuery(); base.FireIdentifierChanged(); } } public bool IsSchemaQuery { [TargetedPatchingOptOut("Performance critical to inline this type of method across NGen image boundaries")] get { return this.isSchemaQuery; } set { this.isSchemaQuery = value; this.BuildQuery(); base.FireIdentifierChanged(); } } public override string QueryString { get { this.BuildQuery(); return base.QueryString; } set { base.QueryString = value; } } public StringCollection SelectedProperties { [TargetedPatchingOptOut("Performance critical to inline this type of method across NGen image boundaries")] get { return this.selectedProperties; } set { if (value == null) { this.selectedProperties = new StringCollection(); } else { StringCollection stringCollections = value; StringCollection stringCollections1 = new StringCollection(); foreach (string str in stringCollections) { stringCollections1.Add(str); } this.selectedProperties = stringCollections1; } this.BuildQuery(); base.FireIdentifierChanged(); } } [TargetedPatchingOptOut("Performance critical to inline this type of method across NGen image boundaries")] public SelectQuery() : this(null) { } public SelectQuery(string queryOrClassName) { this.selectedProperties = new StringCollection(); if (queryOrClassName == null) { return; } else { if (!queryOrClassName.TrimStart(new char[0]).StartsWith(ManagementQuery.tokenSelect, StringComparison.OrdinalIgnoreCase)) { ManagementPath managementPath = new ManagementPath(queryOrClassName); if (!managementPath.IsClass || managementPath.NamespacePath.Length != 0) { throw new ArgumentException(RC.GetString("INVALID_QUERY"), "queryOrClassName"); } else { this.ClassName = queryOrClassName; return; } } else { this.QueryString = queryOrClassName; return; } } } [TargetedPatchingOptOut("Performance critical to inline this type of method across NGen image boundaries")] public SelectQuery(string className, string condition) : this(className, condition, null) { } public SelectQuery(string className, string condition, string[] selectedProperties) { this.isSchemaQuery = false; this.className = className; this.condition = condition; this.selectedProperties = new StringCollection(); if (selectedProperties != null) { this.selectedProperties.AddRange(selectedProperties); } this.BuildQuery(); } public SelectQuery(bool isSchemaQuery, string condition) { if (isSchemaQuery) { this.isSchemaQuery = true; this.className = null; this.condition = condition; this.selectedProperties = null; this.BuildQuery(); return; } else { throw new ArgumentException(RC.GetString("INVALID_QUERY"), "isSchemaQuery"); } } protected internal void BuildQuery() { string str; string str1; if (this.isSchemaQuery) { str = "select * from meta_class"; } else { if (this.className == null) { base.SetQueryString(string.Empty); } if (this.className == null || this.className.Length == 0) { return; } else { str = ManagementQuery.tokenSelect; if (this.selectedProperties 
== null || 0 >= this.selectedProperties.Count) { str = string.Concat(str, "* "); } else { int count = this.selectedProperties.Count; for (int i = 0; i < count; i++) { string str2 = str; string item = this.selectedProperties[i]; if (i == count - 1) { str1 = " "; } else { str1 = ","; } str = string.Concat(str2, item, str1); } } str = string.Concat(str, "from ", this.className); } } if (this.Condition != null && this.Condition.Length != 0) { str = string.Concat(str, " where ", this.condition); } base.SetQueryString(str); } public override object Clone() { string[] strArrays = null; if (this.selectedProperties != null) { int count = this.selectedProperties.Count; if (0 < count) { strArrays = new string[count]; this.selectedProperties.CopyTo(strArrays, 0); } } if (this.isSchemaQuery) { return new SelectQuery(true, this.condition); } else { return new SelectQuery(this.className, this.condition, strArrays); } } protected internal override void ParseQuery(string query) { string str; int num; this.className = null; this.condition = null; if (this.selectedProperties != null) { this.selectedProperties.Clear(); } string str1 = query.Trim(); bool flag = false; if (this.isSchemaQuery) { string str2 = "select"; if (str1.Length < str2.Length || string.Compare(str1, 0, str2, 0, str2.Length, StringComparison.OrdinalIgnoreCase) != 0) { throw new ArgumentException(RC.GetString("INVALID_QUERY"), "select"); } else { str1 = str1.Remove(0, str2.Length).TrimStart(null); if (str1.IndexOf('*', 0) == 0) { str1 = str1.Remove(0, 1).TrimStart(null); str2 = "from"; if (str1.Length < str2.Length || string.Compare(str1, 0, str2, 0, str2.Length, StringComparison.OrdinalIgnoreCase) != 0) { throw new ArgumentException(RC.GetString("INVALID_QUERY"), "from"); } else { str1 = str1.Remove(0, str2.Length).TrimStart(null); str2 = "meta_class"; if (str1.Length < str2.Length || string.Compare(str1, 0, str2, 0, str2.Length, StringComparison.OrdinalIgnoreCase) != 0) { throw new ArgumentException(RC.GetString("INVALID_QUERY"), "meta_class"); } else { str1 = str1.Remove(0, str2.Length).TrimStart(null); if (0 >= str1.Length) { this.condition = string.Empty; } else { str2 = "where"; if (str1.Length < str2.Length || string.Compare(str1, 0, str2, 0, str2.Length, StringComparison.OrdinalIgnoreCase) != 0) { throw new ArgumentException(RC.GetString("INVALID_QUERY"), "where"); } else { str1 = str1.Remove(0, str2.Length); if (str1.Length == 0 || !char.IsWhiteSpace(str1[0])) { throw new ArgumentException(RC.GetString("INVALID_QUERY")); } else { str1 = str1.TrimStart(null); this.condition = str1; } } } this.className = null; this.selectedProperties = null; } } } else { throw new ArgumentException(RC.GetString("INVALID_QUERY"), "*"); } } } else { string str3 = ManagementQuery.tokenSelect; if (str1.Length < str3.Length || string.Compare(str1, 0, str3, 0, str3.Length, StringComparison.OrdinalIgnoreCase) != 0) { throw new ArgumentException(RC.GetString("INVALID_QUERY")); } else { ManagementQuery.ParseToken(ref str1, str3, ref flag); if (str1[0] == '*') { str1 = str1.Remove(0, 1).TrimStart(null); } else { if (this.selectedProperties == null) { this.selectedProperties = new StringCollection(); } else { this.selectedProperties.Clear(); } while (true) { int num1 = str1.IndexOf(','); num = num1; if (num1 <= 0) { break; } str = str1.Substring(0, num); str1 = str1.Remove(0, num + 1).TrimStart(null); str = str.Trim(); if (str.Length > 0) { this.selectedProperties.Add(str); } } int num2 = str1.IndexOf(' '); num = num2; if (num2 <= 0) { throw new 
ArgumentException(RC.GetString("INVALID_QUERY")); } else { str = str1.Substring(0, num); str1 = str1.Remove(0, num).TrimStart(null); this.selectedProperties.Add(str); } } str3 = "from "; flag = false; if (str1.Length < str3.Length || string.Compare(str1, 0, str3, 0, str3.Length, StringComparison.OrdinalIgnoreCase) != 0) { throw new ArgumentException(RC.GetString("INVALID_QUERY")); } else { ManagementQuery.ParseToken(ref str1, str3, null, ref flag, ref this.className); str3 = "where "; if (str1.Length >= str3.Length && string.Compare(str1, 0, str3, 0, str3.Length, StringComparison.OrdinalIgnoreCase) == 0) { this.condition = str1.Substring(str3.Length).Trim(); return; } } } } } } }
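// Illustrative usage (not part of the SelectQuery implementation above): how BuildQuery turns
// the strongly typed pieces into a WQL string, and how the schema-query constructor differs.
// Win32_Service / Win32_Process are just example WMI classes; expected output is shown in the
// comments modulo whitespace.
using System;
using System.Management;

internal static class SelectQuerySketch
{
    private static void Main()
    {
        var byParts = new SelectQuery("Win32_Service", "State = 'Running'", new[] { "Name", "State" });
        Console.WriteLine(byParts.QueryString);
        // select Name,State from Win32_Service where State = 'Running'

        var schema = new SelectQuery(true, "__CLASS = 'Win32_Service'");
        Console.WriteLine(schema.QueryString);
        // select * from meta_class where __CLASS = 'Win32_Service'

        // Assigning a full query string goes through ParseQuery, which repopulates the parts.
        var parsed = new SelectQuery("select Name from Win32_Process where Handle = '4'");
        Console.WriteLine(parsed.ClassName);   // Win32_Process
        Console.WriteLine(parsed.Condition);   // Handle = '4'
    }
}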
using System; using System.Collections.Generic; using System.ComponentModel; using System.Globalization; using System.Linq; namespace Tharga.Toolkit.Console.Command.Base { internal class InputManager { private readonly ICommandBase _commandBase; private readonly string _paramName; private readonly IConsole _console; private static readonly Dictionary<string, List<string>> _commandHistory = new Dictionary<string, List<string>>(); private int _commandHistoryIndex = -1; private Location _startLocation; private int _tabIndex = -1; //TODO: Theese two properties are the uggliest thing. What can I do to remove them? private static int _currentBufferLineCount; private static int _cursorLineOffset; public static int CurrentBufferLineCount { get { return _currentBufferLineCount == 0 ? 1 : (_currentBufferLineCount + 1); } private set { _currentBufferLineCount = value; } } public static int CursorLineOffset { get { return _cursorLineOffset; } set { _cursorLineOffset = value; } } public InputManager(IConsole console, ICommandBase commandBase, string paramName) { _commandBase = commandBase; _console = console; _console.LinesInsertedEvent += LinesInsertedEvent; _paramName = paramName; _startLocation = new Location(_console.CursorLeft, _console.CursorTop); } private void LinesInsertedEvent(object sender, LinesInsertedEventArgs e) { _startLocation = new Location(_startLocation.Left, _startLocation.Top + e.LineCount); } //TODO: Test this function public T ReadLine<T>(KeyValuePair<T, string>[] selection, bool allowEscape) { var inputBuffer = new InputBuffer(); inputBuffer.InputBufferChangedEvent += InputBufferChangedEvent; _console.Write(string.Format("{0}{1}", _paramName, _paramName.Length > 2 ? ": " : string.Empty)); _startLocation = new Location(_console.CursorLeft, _console.CursorTop); while (true) { try { var readKey = _console.ReadKey(true); var currentScreenLocation = new Location(_console.CursorLeft, _console.CursorTop); var currentBufferPosition = ((currentScreenLocation.Top - _startLocation.Top) * _console.BufferWidth) + currentScreenLocation.Left - _startLocation.Left; if ((readKey.KeyChar >= 32 && readKey.KeyChar <= 126) || readKey.Key == ConsoleKey.Oem5) { var input = readKey.KeyChar; InsertText(currentScreenLocation, input, inputBuffer, currentBufferPosition, _startLocation); } else if (readKey.Modifiers == ConsoleModifiers.Control) { switch (readKey.Key) { case ConsoleKey.V: var input = System.Windows.Clipboard.GetText().ToArray(); foreach (var chr in input) { InsertText(currentScreenLocation, chr, inputBuffer, currentBufferPosition, _startLocation); if (currentScreenLocation.Left == _console.BufferWidth - 1) currentScreenLocation = new Location(0, currentScreenLocation.Top + 1); else currentScreenLocation = new Location(currentScreenLocation.Left + 1, currentScreenLocation.Top); currentBufferPosition++; } break; case ConsoleKey.LeftArrow: if (currentBufferPosition > 0) { var leftOfCursor = inputBuffer.ToString().Substring(0, currentBufferPosition).TrimEnd(' '); var last = leftOfCursor.LastIndexOf(' '); if (last != -1) _console.CursorLeft = last + _startLocation.Left + 1; else _console.CursorLeft = _startLocation.Left; } break; case ConsoleKey.RightArrow: var l2 = inputBuffer.ToString().IndexOf(' ', currentBufferPosition); if (l2 != -1) { while (inputBuffer.ToString().Length > l2 + 1 && inputBuffer.ToString()[l2 + 1] == ' ') l2++; _console.CursorLeft = l2 + _startLocation.Left + 1; } else _console.CursorLeft = inputBuffer.ToString().Length + _startLocation.Left; break; default: 
System.Diagnostics.Debug.WriteLine("No action for ctrl-" + readKey.Key); break; } } else { switch (readKey.Key) { case ConsoleKey.Enter: var response = GetResponse(selection, inputBuffer); RememberCommandHistory(inputBuffer); return response; case ConsoleKey.LeftArrow: if (currentBufferPosition == 0) continue; MoveCursorLeft(); break; case ConsoleKey.RightArrow: if (currentBufferPosition == inputBuffer.Length) continue; MoveCursorRight(); break; case ConsoleKey.Home: MoveCursorToStart(_startLocation); break; case ConsoleKey.End: MoveCursorToEnd(_startLocation, inputBuffer); break; case ConsoleKey.DownArrow: case ConsoleKey.UpArrow: RecallCommandHistory(readKey, inputBuffer); break; case ConsoleKey.Delete: if (currentBufferPosition == inputBuffer.Length) continue; MoveBufferLeft(new Location(currentScreenLocation.Left + 1, currentScreenLocation.Top), inputBuffer, _startLocation); inputBuffer.RemoveAt(currentBufferPosition); CurrentBufferLineCount = (int)Math.Ceiling((decimal)(inputBuffer.Length - _console.BufferWidth + _startLocation.Left + 1) / _console.BufferWidth); break; case ConsoleKey.Backspace: if (currentBufferPosition == 0) continue; MoveBufferLeft(currentScreenLocation, inputBuffer, _startLocation); inputBuffer.RemoveAt(currentBufferPosition - 1); MoveCursorLeft(); CurrentBufferLineCount = (int)Math.Ceiling((decimal)(inputBuffer.Length - _console.BufferWidth + _startLocation.Left + 1) / _console.BufferWidth); break; case ConsoleKey.Escape: if (inputBuffer.IsEmpty && allowEscape) { _console.NewLine(); throw new CommandEscapeException(); } Clear(inputBuffer); break; case ConsoleKey.Tab: if (selection.Any()) { var tabIndex = _tabIndex + 1; if (tabIndex == selection.Length) tabIndex = 0; Clear(inputBuffer); _console.Write(selection[tabIndex].Value); inputBuffer.Add(selection[tabIndex].Value); _tabIndex = tabIndex; CurrentBufferLineCount = (int)Math.Ceiling((decimal)(inputBuffer.Length - _console.BufferWidth + _startLocation.Left + 1) / _console.BufferWidth); } else { InsertText(currentScreenLocation, (char)9, inputBuffer, currentBufferPosition, _startLocation); } break; case ConsoleKey.PageUp: case ConsoleKey.PageDown: case ConsoleKey.LeftWindows: case ConsoleKey.RightWindows: case ConsoleKey.Applications: case ConsoleKey.Insert: case ConsoleKey.F1: case ConsoleKey.F2: case ConsoleKey.F3: case ConsoleKey.F4: case ConsoleKey.F5: case ConsoleKey.F6: case ConsoleKey.F7: case ConsoleKey.F8: case ConsoleKey.F9: case ConsoleKey.F10: case ConsoleKey.F11: case ConsoleKey.F12: case ConsoleKey.F13: //Ignore break; default: throw new ArgumentOutOfRangeException(string.Format("Key {0} is not handled ({1}).", readKey.Key, readKey.KeyChar)); } } CursorLineOffset = _console.CursorTop - _startLocation.Top; } catch (CommandEscapeException) { throw; } catch (Exception exception) { _commandBase.OutputError(exception.Message); } } } private void RecallCommandHistory(ConsoleKeyInfo readKey, InputBuffer inputBuffer) { if (_commandHistory.ContainsKey(_paramName)) { if (_commandHistoryIndex == -1) { _commandHistoryIndex = 0; } else if (readKey.Key == ConsoleKey.UpArrow) { _commandHistoryIndex++; if (_commandHistoryIndex == _commandHistory[_paramName].Count) { _commandHistoryIndex = 0; } } else if (readKey.Key == ConsoleKey.DownArrow) { _commandHistoryIndex--; if (_commandHistoryIndex < 0) { _commandHistoryIndex = _commandHistory[_paramName].Count - 1; } } Clear(inputBuffer); _console.Write(_commandHistory[_paramName][_commandHistoryIndex]); inputBuffer.Add(_commandHistory[_paramName][_commandHistoryIndex]); 
} } private void RememberCommandHistory(InputBuffer inputBuffer) { if (!_commandHistory.ContainsKey(_paramName)) { _commandHistory.Add(_paramName, new List<string>()); } var inputString = inputBuffer.ToString(); if (_commandHistory[_paramName].All(x => string.Compare(inputString, x, StringComparison.InvariantCulture) != 0) && !string.IsNullOrEmpty(inputString)) { _commandHistory[_paramName].Add(inputString); } } private void Clear(InputBuffer inputBuffer) { MoveCursorToStart(_startLocation); _console.Write(new string(' ', inputBuffer.Length)); MoveCursorToStart(_startLocation); inputBuffer.Clear(); CurrentBufferLineCount = (int)Math.Ceiling((decimal)(inputBuffer.Length - _console.BufferWidth + _startLocation.Left + 1) / _console.BufferWidth); } private T GetResponse<T>(KeyValuePair<T, string>[] selection, InputBuffer inputBuffer) { T response; if (selection.Any()) { if (_tabIndex != -1) { _console.NewLine(); response = selection[_tabIndex].Key; } else { var items = selection.Where(x => x.Value == inputBuffer.ToString()).ToArray(); if(selection.Any(x => x.Value == "") && !string.IsNullOrEmpty( inputBuffer.ToString())) { response = (T)TypeDescriptor.GetConverter(typeof(T)).ConvertFromInvariantString(inputBuffer.ToString()); _console.NewLine(); return response; } if (!items.Any()) throw new EntryException("No item match the entry."); if (items.Count() > 1) throw new EntryException("There are several matches to the entry."); _console.NewLine(); response = items.Single().Key; } } else { response = (T)TypeDescriptor.GetConverter(typeof(T)).ConvertFromInvariantString(inputBuffer.ToString()); _console.NewLine(); } return response; } private void InputBufferChangedEvent(object sender, InputBufferChangedEventArgs e) { _tabIndex = -1; } private void InsertText(Location currentScreenLocation, char input, InputBuffer inputBuffer, int currentBufferPosition, Location startLocation) { //Check if the text to the right is on more than one line var charsToTheRight = inputBuffer.Length - currentBufferPosition; var bufferToTheRight = _console.BufferWidth - currentScreenLocation.Left - startLocation.Left + 1; if (charsToTheRight > bufferToTheRight) { var lines = (int)Math.Ceiling((decimal)(inputBuffer.Length - _console.BufferWidth + startLocation.Left + 1) / _console.BufferWidth); for (var i = lines; i > 0; i--) { _console.MoveBufferArea(0, currentScreenLocation.Top + i - 1 + 1, _console.BufferWidth - 1, 1, 1, currentScreenLocation.Top + i - 1 + 1); _console.MoveBufferArea(_console.BufferWidth - 1, currentScreenLocation.Top + i - 1, 1, 1, 0, currentScreenLocation.Top + i - 1 + 1); } } _console.MoveBufferArea(currentScreenLocation.Left, currentScreenLocation.Top, _console.BufferWidth - currentScreenLocation.Left, 1, currentScreenLocation.Left + 1, currentScreenLocation.Top); if (input == 9) { _console.Write(((char)26).ToString(CultureInfo.InvariantCulture)); } else { _console.Write(input.ToString()); } inputBuffer.Insert(currentBufferPosition, input.ToString(CultureInfo.InvariantCulture)); CurrentBufferLineCount = (int)Math.Ceiling((decimal)(inputBuffer.Length - _console.BufferWidth + _startLocation.Left + 1) / _console.BufferWidth); } private void MoveBufferLeft(Location currentScreenLocation, InputBuffer inputBuffer, Location startLocation) { _console.MoveBufferArea(currentScreenLocation.Left, currentScreenLocation.Top, _console.BufferWidth - currentScreenLocation.Left, 1, currentScreenLocation.Left - 1, currentScreenLocation.Top); var done = _console.BufferWidth - startLocation.Left; var line = 1; while 
(inputBuffer.Length >= done) { _console.MoveBufferArea(0, currentScreenLocation.Top + line, 1, 1, _console.BufferWidth - 1, currentScreenLocation.Top + line - 1); _console.MoveBufferArea(1, currentScreenLocation.Top + line, _console.BufferWidth - 1, 1, 0, currentScreenLocation.Top + line); done += _console.BufferWidth; line++; } } private void MoveCursorToStart(Location startLocation) { _console.CursorLeft = startLocation.Left; _console.CursorTop = startLocation.Top; } private void MoveCursorToEnd(Location startLocation, InputBuffer inputBuffer) { var pos = startLocation.Left + inputBuffer.Length; var ln = 0; while (pos > _console.BufferWidth) { ln++; pos -= _console.BufferWidth; } _console.CursorLeft = pos; _console.CursorTop = startLocation.Top + ln; } private void MoveCursorRight() { if (_console.CursorLeft == _console.BufferWidth - 1) { _console.CursorTop++; _console.CursorLeft = 0; } else { _console.CursorLeft++; } } private void MoveCursorLeft() { if (_console.CursorLeft == 0) { _console.CursorTop--; _console.CursorLeft = _console.BufferWidth - 1; } else { _console.CursorLeft--; } } } }
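// A minimal sketch, independent of the IConsole abstraction above, of the coordinate
// arithmetic ReadLine depends on: mapping between a linear position in the input buffer and a
// (left, top) screen location, given the prompt's start location and the console buffer width.
// The widths and positions below are arbitrary example values.
using System;

internal static class CursorMathSketch
{
    // Same formula ReadLine uses: bufferPos = (top - startTop) * width + left - startLeft.
    public static int ToBufferPosition(int left, int top, int startLeft, int startTop, int bufferWidth)
        => (top - startTop) * bufferWidth + left - startLeft;

    // Inverse mapping, mirroring MoveCursorToEnd: peel whole lines off the linear offset.
    public static (int Left, int Top) ToScreenLocation(int bufferPosition, int startLeft, int startTop, int bufferWidth)
    {
        int pos = startLeft + bufferPosition;
        int line = 0;
        while (pos >= bufferWidth)
        {
            line++;
            pos -= bufferWidth;
        }
        return (pos, startTop + line);
    }

    private static void Main()
    {
        const int width = 80;
        var (left, top) = ToScreenLocation(bufferPosition: 100, startLeft: 10, startTop: 5, bufferWidth: width);
        Console.WriteLine($"({left}, {top})");                        // (30, 6) - wrapped onto the next line
        Console.WriteLine(ToBufferPosition(left, top, 10, 5, width)); // 100 - round-trips back
    }
}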
// ---------------------------------------------------------------------------------- // // Copyright Microsoft Corporation // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ---------------------------------------------------------------------------------- using AutoMapper; using Microsoft.Azure.Commands.Network.Models; using Microsoft.Azure.Commands.ResourceManager.Common.Tags; using Microsoft.Azure.Management.Network; using System; using System.Collections; using System.Collections.Generic; using System.Management.Automation; using MNM = Microsoft.Azure.Management.Network.Models; namespace Microsoft.Azure.Commands.Network { [Cmdlet(VerbsCommon.New, "AzureRmVirtualNetworkGateway", SupportsShouldProcess = true), OutputType(typeof(PSVirtualNetworkGateway))] public class NewAzureVirtualNetworkGatewayCommand : VirtualNetworkGatewayBaseCmdlet { [Alias("ResourceName")] [Parameter( Mandatory = true, ValueFromPipelineByPropertyName = true, HelpMessage = "The resource name.")] [ValidateNotNullOrEmpty] public virtual string Name { get; set; } [Parameter( Mandatory = true, ValueFromPipelineByPropertyName = true, HelpMessage = "The resource group name.")] [ValidateNotNullOrEmpty] public virtual string ResourceGroupName { get; set; } [Parameter( Mandatory = true, ValueFromPipelineByPropertyName = true, HelpMessage = "location.")] [ValidateNotNullOrEmpty] public string Location { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The IpConfigurations for Virtual network gateway.")] [ValidateNotNullOrEmpty] public List<PSVirtualNetworkGatewayIpConfiguration> IpConfigurations { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The type of this virtual network gateway: Vpn, ExoressRoute")] [ValidateSet( MNM.VirtualNetworkGatewayType.Vpn, MNM.VirtualNetworkGatewayType.ExpressRoute, IgnoreCase = true)] public string GatewayType { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The type of the Vpn:PolicyBased/RouteBased")] [ValidateSet( MNM.VpnType.PolicyBased, MNM.VpnType.RouteBased, IgnoreCase = true)] public string VpnType { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "EnableBgp Flag")] public bool EnableBgp { get; set; } [Parameter( Mandatory = false, HelpMessage = "Flag to enable Active Active feature on virtual network gateway")] public SwitchParameter EnableActiveActiveFeature { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The type of the Vpn:PolicyBased/RouteBased")] [ValidateSet( MNM.VirtualNetworkGatewaySkuTier.Basic, MNM.VirtualNetworkGatewaySkuTier.Standard, MNM.VirtualNetworkGatewaySkuTier.HighPerformance, MNM.VirtualNetworkGatewaySkuTier.UltraPerformance, IgnoreCase = true)] public string GatewaySku { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, ParameterSetName = "SetByResource", HelpMessage = "GatewayDefaultSite")] 
public PSLocalNetworkGateway GatewayDefaultSite { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "P2S VpnClient AddressPool")] [ValidateNotNullOrEmpty] public List<string> VpnClientAddressPool { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The list of VpnClientRootCertificates to be added.")] public List<PSVpnClientRootCertificate> VpnClientRootCertificates { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The list of VpnClientCertificates to be revoked.")] public List<PSVpnClientRevokedCertificate> VpnClientRevokedCertificates { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The virtual network gateway's ASN for BGP over VPN")] public uint Asn { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "The weight added to routes learned over BGP from this virtual network gateway")] public int PeerWeight { get; set; } [Parameter( Mandatory = false, ValueFromPipelineByPropertyName = true, HelpMessage = "A hashtable which represents resource tags.")] public Hashtable Tag { get; set; } [Parameter( Mandatory = false, HelpMessage = "Do not ask for confirmation if you want to overrite a resource")] public SwitchParameter Force { get; set; } public override void Execute() { base.Execute(); WriteWarning("The output object type of this cmdlet will be modified in a future release."); var present = this.IsVirtualNetworkGatewayPresent(this.ResourceGroupName, this.Name); string warningMsg = string.Empty; string continueMsg = Properties.Resources.CreatingResourceMessage; bool force = true; if (!string.IsNullOrEmpty(GatewaySku) && GatewaySku.Equals(MNM.VirtualNetworkGatewaySkuTier.UltraPerformance,StringComparison.InvariantCultureIgnoreCase)) { warningMsg = string.Format(Properties.Resources.UltraPerformaceGatewayWarning,this.Name); force = false; } else { warningMsg = string.Format(Properties.Resources.OverwritingResource, this.Name); } if (this.Force.IsPresent) { force = true; } ConfirmAction( force, warningMsg, continueMsg, Name, () => { var virtualNetworkGateway = CreateVirtualNetworkGateway(); WriteObject(virtualNetworkGateway); }, () => present); } private PSVirtualNetworkGateway CreateVirtualNetworkGateway() { var vnetGateway = new PSVirtualNetworkGateway(); vnetGateway.Name = this.Name; vnetGateway.ResourceGroupName = this.ResourceGroupName; vnetGateway.Location = this.Location; if (this.GatewaySku != null) { vnetGateway.Sku = new PSVirtualNetworkGatewaySku(); vnetGateway.Sku.Tier = this.GatewaySku; vnetGateway.Sku.Name = this.GatewaySku; } else { // If gateway sku param value is not passed, set gateway sku to Standard if VpnType is RouteBased and Basic if VpnType is PolicyBased if (this.VpnType != null && this.VpnType.Equals(MNM.VpnType.RouteBased)) { vnetGateway.Sku = new PSVirtualNetworkGatewaySku(); vnetGateway.Sku.Tier = MNM.VirtualNetworkGatewaySkuTier.Standard; vnetGateway.Sku.Name = MNM.VirtualNetworkGatewaySkuTier.Standard; } else { vnetGateway.Sku = new PSVirtualNetworkGatewaySku(); vnetGateway.Sku.Tier = MNM.VirtualNetworkGatewaySkuTier.Basic; vnetGateway.Sku.Name = MNM.VirtualNetworkGatewaySkuTier.Basic; } } if (this.EnableActiveActiveFeature.IsPresent && !vnetGateway.Sku.Tier.Equals(MNM.VirtualNetworkGatewaySkuTier.HighPerformance)) { throw new ArgumentException("Virtual Network Gateway Sku should be " + 
MNM.VirtualNetworkGatewaySkuTier.HighPerformance + " when Active-Active feature flag is set to True."); } if (this.EnableActiveActiveFeature.IsPresent && !this.VpnType.Equals(MNM.VpnType.RouteBased)) { throw new ArgumentException("Virtual Network Gateway VpnType should be " + MNM.VpnType.RouteBased + " when Active-Active feature flag is set to True."); } if (this.EnableActiveActiveFeature.IsPresent && this.IpConfigurations.Count != 2) { throw new ArgumentException("Virtual Network Gateway should have 2 Gateway IpConfigurations specified when Active-Active feature flag is True."); } if (!this.EnableActiveActiveFeature.IsPresent && this.IpConfigurations.Count == 2) { throw new ArgumentException("Virtual Network Gateway should have Active-Active feature flag set to True as there are 2 Gateway IpConfigurations specified. OR there should be only one Gateway IpConfiguration specified."); } if (this.IpConfigurations != null) { vnetGateway.IpConfigurations = this.IpConfigurations; } if (!string.IsNullOrEmpty(GatewaySku) && GatewaySku.Equals( MNM.VirtualNetworkGatewaySkuTier.UltraPerformance, StringComparison.InvariantCultureIgnoreCase) && !string.IsNullOrEmpty(GatewayType) && !GatewayType.Equals( MNM.VirtualNetworkGatewayType.ExpressRoute.ToString(), StringComparison.InvariantCultureIgnoreCase)) { throw new ArgumentException("Virtual Network Gateway Need to be Express Route when the sku is UltraPerformance."); } vnetGateway.GatewayType = this.GatewayType; vnetGateway.VpnType = this.VpnType; vnetGateway.EnableBgp = this.EnableBgp; vnetGateway.ActiveActive = this.EnableActiveActiveFeature.IsPresent; if (this.GatewayDefaultSite != null) { vnetGateway.GatewayDefaultSite = new PSResourceId(); vnetGateway.GatewayDefaultSite.Id = this.GatewayDefaultSite.Id; } else { vnetGateway.GatewayDefaultSite = null; } if (this.VpnClientAddressPool != null || this.VpnClientRootCertificates != null || this.VpnClientRevokedCertificates != null) { vnetGateway.VpnClientConfiguration = new PSVpnClientConfiguration(); if (this.VpnClientAddressPool != null) { // Make sure passed Virtual Network gateway type is RouteBased if P2S VpnClientAddressPool is specified. 
if (this.VpnType == null || !this.VpnType.Equals(MNM.VpnType.RouteBased)) { throw new ArgumentException("Virtual Network Gateway VpnType should be :" + MNM.VpnType.RouteBased + " when P2S VpnClientAddressPool is specified."); } vnetGateway.VpnClientConfiguration.VpnClientAddressPool = new PSAddressSpace(); vnetGateway.VpnClientConfiguration.VpnClientAddressPool.AddressPrefixes = this.VpnClientAddressPool; } if (this.VpnClientRootCertificates != null) { vnetGateway.VpnClientConfiguration.VpnClientRootCertificates = this.VpnClientRootCertificates; } if (this.VpnClientRevokedCertificates != null) { vnetGateway.VpnClientConfiguration.VpnClientRevokedCertificates = this.VpnClientRevokedCertificates; } } else { vnetGateway.VpnClientConfiguration = null; } if (this.Asn > 0 || this.PeerWeight > 0) { vnetGateway.BgpSettings = new PSBgpSettings(); vnetGateway.BgpSettings.BgpPeeringAddress = null; // We block modifying the gateway's BgpPeeringAddress (CA) if (this.Asn > 0) { vnetGateway.BgpSettings.Asn = this.Asn; } if (this.PeerWeight > 0) { vnetGateway.BgpSettings.PeerWeight = this.PeerWeight; } else if (this.PeerWeight < 0) { throw new ArgumentException("PeerWeight must be a positive integer"); } } // Map to the sdk object var vnetGatewayModel = Mapper.Map<MNM.VirtualNetworkGateway>(vnetGateway); vnetGatewayModel.Tags = TagsConversionHelper.CreateTagDictionary(this.Tag, validate: true); // Execute the Create VirtualNetwork call this.VirtualNetworkGatewayClient.CreateOrUpdate(this.ResourceGroupName, this.Name, vnetGatewayModel); var getVirtualNetworkGateway = this.GetVirtualNetworkGateway(this.ResourceGroupName, this.Name); return getVirtualNetworkGateway; } } }
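// A condensed, illustrative restatement (not the cmdlet itself) of the defaulting rule that
// CreateVirtualNetworkGateway applies when -GatewaySku is omitted: a RouteBased VPN gateway
// defaults to Standard, everything else to Basic. Plain strings stand in for the MNM.*
// constants used above.
using System;

internal static class GatewaySkuDefaultSketch
{
    public static string ResolveSku(string requestedSku, string vpnType)
    {
        if (!string.IsNullOrEmpty(requestedSku))
            return requestedSku;                            // an explicit value always wins
        return (vpnType != null && vpnType.Equals("RouteBased"))
            ? "Standard"                                    // RouteBased -> Standard
            : "Basic";                                      // PolicyBased / unset -> Basic
    }

    private static void Main()
    {
        Console.WriteLine(ResolveSku(null, "RouteBased"));               // Standard
        Console.WriteLine(ResolveSku(null, "PolicyBased"));              // Basic
        Console.WriteLine(ResolveSku("UltraPerformance", "RouteBased")); // UltraPerformance
    }
}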
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void PermuteDouble1() { var test = new ImmUnaryOpTest__PermuteDouble1(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); } // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); } // Validates passing a static member works test.RunClsVarScenario(); // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates passing a local works, using Load test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); } // Validates passing the field of a local class works test.RunClassLclFldScenario(); // Validates passing an instance member of a class works test.RunClassFldScenario(); // Validates passing the field of a local struct works test.RunStructLclFldScenario(); // Validates passing an instance member of a struct works test.RunStructFldScenario(); } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class ImmUnaryOpTest__PermuteDouble1 { private struct TestStruct { public Vector256<Double> _fld; public static TestStruct Create() { var testStruct = new TestStruct(); for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref testStruct._fld), ref Unsafe.As<Double, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Double>>()); return testStruct; } public void RunStructFldScenario(ImmUnaryOpTest__PermuteDouble1 testClass) { var result = Avx.Permute(_fld, 1); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld, testClass._dataTable.outArrayPtr); } } private static readonly int LargestVectorSize = 32; private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<Double>>() / sizeof(Double); private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<Double>>() / sizeof(Double); private static Double[] _data = new Double[Op1ElementCount]; private static Vector256<Double> _clsVar; 
private Vector256<Double> _fld; private SimpleUnaryOpTest__DataTable<Double, Double> _dataTable; static ImmUnaryOpTest__PermuteDouble1() { for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _clsVar), ref Unsafe.As<Double, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Double>>()); } public ImmUnaryOpTest__PermuteDouble1() { Succeeded = true; for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetDouble(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _fld), ref Unsafe.As<Double, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Double>>()); for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetDouble(); } _dataTable = new SimpleUnaryOpTest__DataTable<Double, Double>(_data, new Double[RetElementCount], LargestVectorSize); } public bool IsSupported => Avx.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead)); var result = Avx.Permute( Unsafe.Read<Vector256<Double>>(_dataTable.inArrayPtr), 1 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load)); var result = Avx.Permute( Avx.LoadVector256((Double*)(_dataTable.inArrayPtr)), 1 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned)); var result = Avx.Permute( Avx.LoadAlignedVector256((Double*)(_dataTable.inArrayPtr)), 1 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead)); var result = typeof(Avx).GetMethod(nameof(Avx.Permute), new Type[] { typeof(Vector256<Double>), typeof(byte) }) .Invoke(null, new object[] { Unsafe.Read<Vector256<Double>>(_dataTable.inArrayPtr), (byte)1 }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result)); ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load)); var result = typeof(Avx).GetMethod(nameof(Avx.Permute), new Type[] { typeof(Vector256<Double>), typeof(byte) }) .Invoke(null, new object[] { Avx.LoadVector256((Double*)(_dataTable.inArrayPtr)), (byte)1 }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result)); ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned)); var result = typeof(Avx).GetMethod(nameof(Avx.Permute), new Type[] { typeof(Vector256<Double>), typeof(byte) }) .Invoke(null, new object[] { Avx.LoadAlignedVector256((Double*)(_dataTable.inArrayPtr)), (byte)1 }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result)); ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario)); var result = Avx.Permute( _clsVar, 1 ); Unsafe.Write(_dataTable.outArrayPtr, result); 
ValidateResult(_clsVar, _dataTable.outArrayPtr); } public void RunLclVarScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead)); var firstOp = Unsafe.Read<Vector256<Double>>(_dataTable.inArrayPtr); var result = Avx.Permute(firstOp, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(firstOp, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load)); var firstOp = Avx.LoadVector256((Double*)(_dataTable.inArrayPtr)); var result = Avx.Permute(firstOp, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(firstOp, _dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned)); var firstOp = Avx.LoadAlignedVector256((Double*)(_dataTable.inArrayPtr)); var result = Avx.Permute(firstOp, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(firstOp, _dataTable.outArrayPtr); } public void RunClassLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario)); var test = new ImmUnaryOpTest__PermuteDouble1(); var result = Avx.Permute(test._fld, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld, _dataTable.outArrayPtr); } public void RunClassFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario)); var result = Avx.Permute(_fld, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld, _dataTable.outArrayPtr); } public void RunStructLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario)); var test = TestStruct.Create(); var result = Avx.Permute(test._fld, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld, _dataTable.outArrayPtr); } public void RunStructFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario)); var test = TestStruct.Create(); test.RunStructFldScenario(this); } public void RunUnsupportedScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario)); bool succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { succeeded = true; } if (!succeeded) { Succeeded = false; } } private void ValidateResult(Vector256<Double> firstOp, void* result, [CallerMemberName] string method = "") { Double[] inArray = new Double[Op1ElementCount]; Double[] outArray = new Double[RetElementCount]; Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray[0]), firstOp); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Double>>()); ValidateResult(inArray, outArray, method); } private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "") { Double[] inArray = new Double[Op1ElementCount]; Double[] outArray = new Double[RetElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), (uint)Unsafe.SizeOf<Vector256<Double>>()); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Double>>()); ValidateResult(inArray, outArray, method); } private void ValidateResult(Double[] firstOp, Double[] result, [CallerMemberName] string method = "") { bool succeeded = true; if (BitConverter.DoubleToInt64Bits(result[0]) != BitConverter.DoubleToInt64Bits(firstOp[1])) { succeeded = false; } 
else { for (var i = 1; i < RetElementCount; i++) { /* VPERMILPD applies the immediate per 128-bit lane: bit i of imm8 = 1 selects the upper or lower double within element i's lane, so the expected source index is (i / 2) * 2 + ((1 >> i) & 1). */ int expectedIndex = ((i / 2) * 2) + ((1 >> i) & 1); if (BitConverter.DoubleToInt64Bits(result[i]) != BitConverter.DoubleToInt64Bits(firstOp[expectedIndex])) { succeeded = false; break; } } } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"{nameof(Avx)}.{nameof(Avx.Permute)}<Double>(Vector256<Double><9>): {method} failed:"); TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})"); TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})"); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } } } }
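// A minimal standalone sketch (separate from the generated test above) of the element mapping
// that ValidateResult encodes for Avx.Permute(value, 1) on Vector256<double>: the immediate is
// applied per 128-bit lane, so with imm8 = 1 only element 0 takes its lane's upper double and
// both upper-lane elements read the lane's lower double. Assumes the .NET Core 3.0+ intrinsics
// surface (Vector256.Create / GetElement).
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

internal static class PermuteSketch
{
    private static void Main()
    {
        if (!Avx.IsSupported)
        {
            Console.WriteLine("AVX is not supported on this machine.");
            return;
        }

        Vector256<double> value = Vector256.Create(10.0, 20.0, 30.0, 40.0);
        Vector256<double> result = Avx.Permute(value, 1);

        // Expected per VPERMILPD: result[0] = value[1], result[1] = value[0],
        // result[2] = value[2], result[3] = value[2].
        for (int i = 0; i < 4; i++)
        {
            Console.WriteLine($"result[{i}] = {result.GetElement(i)}");
        }
        // Prints 20, 10, 30, 30 in order.
    }
}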
using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Identity.EntityFrameworkCore; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using MusicStore.Components; using MusicStore.Models; namespace MusicStore { public class Startup { private readonly Platform _platform; public Startup(IHostingEnvironment hostingEnvironment) { // Below code demonstrates usage of multiple configuration sources. For instance a setting say 'setting1' // is found in both the registered sources, then the later source will win. By this way a Local config // can be overridden by a different setting while deployed remotely. var builder = new ConfigurationBuilder() .SetBasePath(hostingEnvironment.ContentRootPath) .AddJsonFile("config.json") //All environment variables in the process's context flow in as configuration values. .AddEnvironmentVariables(); Configuration = builder.Build(); _platform = new Platform(); } public IConfiguration Configuration { get; private set; } public void ConfigureServices(IServiceCollection services) { services.Configure<AppSettings>(Configuration.GetSection("AppSettings")); // Add EF services to the services container if (_platform.UseInMemoryStore) { services.AddDbContext<MusicStoreContext>(options => options.UseInMemoryDatabase()); } else { services.AddDbContext<MusicStoreContext>(options => options.UseSqlServer(Configuration[StoreConfig.ConnectionStringKey.Replace("__", ":")])); } // Add Identity services to the services container services.AddIdentity<ApplicationUser, IdentityRole>(options => { options.Cookies.ApplicationCookie.AccessDeniedPath = "/Home/AccessDenied"; }) .AddEntityFrameworkStores<MusicStoreContext>() .AddDefaultTokenProviders(); services.AddCors(options => { options.AddPolicy("CorsPolicy", builder => { builder.WithOrigins("http://example.com"); }); }); services.AddLogging(); // Add MVC services to the services container services.AddMvc(); // Add memory cache services services.AddMemoryCache(); services.AddDistributedMemoryCache(); // Add session related services. services.AddSession(); // Add the system clock service services.AddSingleton<ISystemClock, SystemClock>(); // Configure Auth services.AddAuthorization(options => { options.AddPolicy( "ManageStore", authBuilder => { authBuilder.RequireClaim("ManageStore", "Allowed"); }); }); } //This method is invoked when ASPNETCORE_ENVIRONMENT is 'Development' or is not defined //The allowed values are Development,Staging and Production public void ConfigureDevelopment(IApplicationBuilder app, ILoggerFactory loggerFactory) { loggerFactory.AddConsole(minLevel: LogLevel.Information); // StatusCode pages to gracefully handle status codes 400-599. app.UseStatusCodePagesWithRedirects("~/Home/StatusCodePage"); // Display custom error page in production when error occurs // During development use the ErrorPage middleware to display error information in the browser app.UseDeveloperExceptionPage(); app.UseDatabaseErrorPage(); Configure(app); } //This method is invoked when ASPNETCORE_ENVIRONMENT is 'Staging' //The allowed values are Development,Staging and Production public void ConfigureStaging(IApplicationBuilder app, ILoggerFactory loggerFactory) { loggerFactory.AddConsole(minLevel: LogLevel.Warning); // StatusCode pages to gracefully handle status codes 400-599. 
app.UseStatusCodePagesWithRedirects("~/Home/StatusCodePage"); app.UseExceptionHandler("/Home/Error"); Configure(app); } //This method is invoked when ASPNETCORE_ENVIRONMENT is 'Production' //The allowed values are Development,Staging and Production public void ConfigureProduction(IApplicationBuilder app, ILoggerFactory loggerFactory) { loggerFactory.AddConsole(minLevel: LogLevel.Warning); // StatusCode pages to gracefully handle status codes 400-599. app.UseStatusCodePagesWithRedirects("~/Home/StatusCodePage"); app.UseExceptionHandler("/Home/Error"); Configure(app); } public void Configure(IApplicationBuilder app) { // Configure Session. app.UseSession(); // Add static files to the request pipeline app.UseStaticFiles(); // Add cookie-based authentication to the request pipeline app.UseIdentity(); app.UseFacebookAuthentication(new FacebookOptions { AppId = "550624398330273", AppSecret = "10e56a291d6b618da61b1e0dae3a8954" }); app.UseGoogleAuthentication(new GoogleOptions { ClientId = "995291875932-0rt7417v5baevqrno24kv332b7d6d30a.apps.googleusercontent.com", ClientSecret = "J_AT57H5KH_ItmMdu0r6PfXm" }); app.UseTwitterAuthentication(new TwitterOptions { ConsumerKey = "lDSPIu480ocnXYZ9DumGCDw37", ConsumerSecret = "fpo0oWRNc3vsZKlZSq1PyOSoeXlJd7NnG4Rfc94xbFXsdcc3nH" }); // The MicrosoftAccount service has restrictions that prevent the use of // http://localhost:5001/ for test applications. // As such, here is how to change this sample to uses http://ktesting.com:5001/ instead. // Edit the Project.json file and replace http://localhost:5001/ with http://ktesting.com:5001/. // From an admin command console first enter: // notepad C:\Windows\System32\drivers\etc\hosts // and add this to the file, save, and exit (and reboot?): // 127.0.0.1 ktesting.com // Then you can choose to run the app as admin (see below) or add the following ACL as admin: // netsh http add urlacl url=http://ktesting:5001/ user=[domain\user] // The sample app can then be run via: // dnx . web app.UseMicrosoftAccountAuthentication(new MicrosoftAccountOptions { DisplayName = "MicrosoftAccount - Requires project changes", ClientId = "000000004012C08A", ClientSecret = "GaMQ2hCnqAC6EcDLnXsAeBVIJOLmeutL" }); // Add MVC to the request pipeline app.UseMvc(routes => { routes.MapRoute( name: "areaRoute", template: "{area:exists}/{controller}/{action}", defaults: new { action = "Index" }); routes.MapRoute( name: "default", template: "{controller}/{action}/{id?}", defaults: new { controller = "Home", action = "Index" }); routes.MapRoute( name: "api", template: "{controller}/{id?}"); }); //Populates the MusicStore sample data SampleData.InitializeMusicStoreDatabaseAsync(app.ApplicationServices).Wait(); } } }
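// A minimal standalone sketch (assuming the Microsoft.Extensions.Configuration packages the
// Startup above already uses) of the "later source wins" rule its constructor comment
// describes: when two registered sources define the same key, the one added last is returned.
// In-memory collections stand in for config.json and the environment variables.
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

internal static class ConfigPrecedenceSketch
{
    private static void Main()
    {
        var config = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string>   // stands in for config.json
            {
                ["AppSettings:SiteTitle"] = "Music Store (file)"
            })
            .AddInMemoryCollection(new Dictionary<string, string>   // stands in for environment variables
            {
                ["AppSettings:SiteTitle"] = "Music Store (environment)"
            })
            .Build();

        Console.WriteLine(config["AppSettings:SiteTitle"]);          // Music Store (environment)
    }
}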
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Linq; using System.Text; namespace Fairweather.Service { static partial class Extensions { public static string str(this object obj) { return obj.ToString(); } public static string strdef(this object obj) { return (obj ?? (object)"").ToString(); } public static string strdef(this object obj, string def) { return (obj ?? (object)def).ToString(); } public static T To<T>(this object obj) { return ((T)obj); } // http://stackoverflow.com/questions/271398/post-your-extension-goodies-for-c-net-codeplex-com-extensionoverflow?answer=274652#274652 public static T To2<T>(this IConvertible obj) { return (T)Convert.ChangeType(obj, typeof(T)); } // http://jacobcarpenters.blogspot.com/2006/06/c-30-and-delegate-conversion.html public static T Convert_Delegate<T>(this Delegate source) where T : class { if (source.GetInvocationList().Length > 1) throw new ArgumentException("Cannot safely convert MulticastDelegate"); var ret = Delegate.CreateDelegate(typeof(T), source.Target, source.Method) as T; return ret; } /* Casts */ public static T To_Enum<T>(this string value, bool ignore_case) where T : struct { return (T)Enum.Parse(typeof(T), value, ignore_case); } public static T To_Enum<T>(this object value) where T : struct { return (T)Enum.ToObject(typeof(T), value); } public static T? To_Enum_<T>(this object value) where T : struct { if (Enum.IsDefined(typeof(T), value)) return (T?)Enum.ToObject(typeof(T), value); return null; } public static T To_Enum<T>(this int value) where T : struct { return (T)Enum.ToObject(typeof(T), value); } public static T? To_Enum_<T>(this int value) where T : struct { if (Enum.IsDefined(typeof(T), value)) return (T?)Enum.ToObject(typeof(T), value); return null; } public static Int64? ToInt64_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToInt64(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static Int64 ToInt64(this object obj) { return Convert.ToInt64(obj); } public static Decimal? ToDecimal_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToDecimal(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static Decimal ToDecimal(this object obj) { return Convert.ToDecimal(obj); } public static Double? ToDouble_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToDouble(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static Double ToDouble(this object obj) { return Convert.ToDouble(obj); } public static Single? ToSingle_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToSingle(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static Single ToSingle(this object obj) { return Convert.ToSingle(obj); } public static short ToInt16(this object obj) { return Convert.ToInt16(obj); } public static short? ToInt16_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToInt16(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static int ToInt32(this object obj) { return Convert.ToInt32(obj); } public static int? 
ToInt32_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToInt32(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static UInt16? ToUInt16_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToUInt16(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static UInt16 ToUInt16(this object obj) { return Convert.ToUInt16(obj); } public static UInt64? ToUInt64_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToUInt64(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static UInt64 ToUInt64(this object obj) { return Convert.ToUInt64(obj); } public static DateTime? ToDateTime_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToDateTime(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static DateTime ToDateTime(this object obj) { return Convert.ToDateTime(obj); } public static Char? ToChar_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToChar(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static Char ToChar(this object obj) { return Convert.ToChar(obj); } public static UInt32? ToUInt32_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToUInt32(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static UInt32 ToUInt32(this object obj) { return Convert.ToUInt32(obj); } public static SByte? ToSByte_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToSByte(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static SByte ToSByte(this object obj) { return Convert.ToSByte(obj); } public static Byte? ToByte_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToByte(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static Byte ToByte(this object obj) { return Convert.ToByte(obj); } public static bool? 
ToBool_(this object obj) { if (obj.IsNullOrEmpty()) return null; try { return Convert.ToBoolean(obj); } catch (FormatException) { return null; } catch (InvalidCastException) { return null; } } public static bool ToBool(this object obj) { return Convert.ToBoolean(obj); } // **************************** #if !LITE public static IRead<TKey, TValue> ro<TKey, TValue>(this IReadWrite<TKey, TValue> irw) { return new Proxy<TKey, TValue>(irw); } public static ReadOnlyCollection<T> ro<T>(this T[] array) { return Array.AsReadOnly(array); } static public Dictionary<T1, T2> dict<T1, T2>(this IEnumerable<Pair<T1, T2>> seq) { return seq.ToDictionary(pair => pair.First, pair => pair.Second); } static public IEnumerable<Pair<T1, T2>> pairs<T1, T2>(this IEnumerable<KeyValuePair<T1, T2>> seq) { foreach (var kvp in seq) yield return (Pair<T1, T2>)kvp; //return seq.Cast<Pair<T1, T2>>(); } public static IEnumerable<KeyValuePair<T1, T2>> kvps<T1, T2>(this IEnumerable<Pair<T1, T2>> seq) { return seq.Cast<KeyValuePair<T1, T2>>(); } static public RW_Dict<TIx, TValue> rw<TIx, TValue>(this Dictionary<TIx, TValue> dict) { return new RW_Dict<TIx, TValue>(dict); } static public RW_IDict<TIx, TValue> rwi<TIx, TValue>(this IDictionary<TIx, TValue> dict) { return new RW_IDict<TIx, TValue>(dict); } static public RW_List<TValue> rw<TIx, TValue>(this List<TValue> list) { return new RW_List<TValue>(list); } static public RW_IList<TValue> rwi<TIx, TValue>(this IList<TValue> list) { return new RW_IList<TValue>(list); } static public RW_Array<TValue> rw<TIx, TValue>(this TValue[] arr) { return new RW_Array<TValue>(arr); } public static string str(this IEnumerable<char> chars) { string ret; int cnt = chars.Count(); if (cnt < 40) { ret = new String(chars.ToArray()); } else { var sb = new StringBuilder(cnt); sb.Append(chars.ToArray()); ret = sb.ToString(); } return ret; } public static T[] arr<T>(this IEnumerable<T> seq) { return seq.ToArray(); } public static List<T> lst<T>(this IEnumerable<T> seq) { return seq.ToList(); } public static Set<T> set<T>(this IEnumerable<T> seq) { return new Set<T>(seq); } public static Stack<T> stack<T>(this IEnumerable<T> seq) { return new Stack<T>(seq); } public static Queue<T> queue<T>(this IEnumerable<T> seq) { return new Queue<T>(seq); } // **************************** public static List<T> lsta<T>(this IEnumerable<T> seq) { return seq as List<T> ?? seq.ToList(); } public static Set<T> seta<T>(this IEnumerable<T> seq) { return seq as Set<T> ?? seq.set(); } public static Stack<T> stacka<T>(this IEnumerable<T> seq) { return seq as Stack<T> ?? new Stack<T>(seq); } public static Queue<T> queuea<T>(this IEnumerable<T> seq) { return seq as Queue<T> ?? new Queue<T>(seq); } #endif } }
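// A self-contained sketch of the pattern the nullable helpers above (ToInt32_, ToDecimal_,
// ToDateTime_, ...) all repeat: attempt a System.Convert conversion and map the failure
// exceptions to null instead of letting them propagate. Names here are illustrative only and
// the null/empty guard is inlined because the IsNullOrEmpty() extension is defined elsewhere
// in that codebase.
using System;

internal static class NullableConvertSketch
{
    public static int? ToInt32OrNull(object value)
    {
        if (value == null || (value is string s && s.Length == 0))
            return null;                               // mirrors the IsNullOrEmpty() guard used above
        try
        {
            return Convert.ToInt32(value);
        }
        catch (FormatException) { return null; }
        catch (InvalidCastException) { return null; }
        catch (OverflowException) { return null; }     // extra guard; the originals let this propagate
    }

    private static void Main()
    {
        Console.WriteLine(ToInt32OrNull("42") ?? -1);    // 42
        Console.WriteLine(ToInt32OrNull("forty") ?? -1); // -1 (FormatException swallowed)
        Console.WriteLine(ToInt32OrNull(null) ?? -1);    // -1 (null input)
    }
}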
using Microsoft.IdentityModel; using Microsoft.IdentityModel.S2S.Protocols.OAuth2; using Microsoft.IdentityModel.S2S.Tokens; using Microsoft.SharePoint.Client; using Microsoft.SharePoint.Client.EventReceivers; using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.IdentityModel.Selectors; using System.IdentityModel.Tokens; using System.IO; using System.Linq; using System.Net; using System.Security.Cryptography.X509Certificates; using System.Security.Principal; using System.ServiceModel; using System.Text; using System.Web; using System.Web.Configuration; using System.Web.Script.Serialization; using AudienceRestriction = Microsoft.IdentityModel.Tokens.AudienceRestriction; using AudienceUriValidationFailedException = Microsoft.IdentityModel.Tokens.AudienceUriValidationFailedException; using SecurityTokenHandlerConfiguration = Microsoft.IdentityModel.Tokens.SecurityTokenHandlerConfiguration; using X509SigningCredentials = Microsoft.IdentityModel.SecurityTokenService.X509SigningCredentials; namespace Core.JavaScriptInjection.WeekNumbersWeb { public static class TokenHelper { #region public fields /// <summary> /// SharePoint principal. /// </summary> public const string SharePointPrincipal = "00000003-0000-0ff1-ce00-000000000000"; /// <summary> /// Lifetime of HighTrust access token, 12 hours. /// </summary> public static readonly TimeSpan HighTrustAccessTokenLifetime = TimeSpan.FromHours(12.0); #endregion public fields #region public methods /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. /// </summary> /// <param name="request">HttpRequest in which to look for a context token</param> /// <returns>The context token string</returns> public static string GetContextTokenFromRequest(HttpRequest request) { return GetContextTokenFromRequest(new HttpRequestWrapper(request)); } /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. /// </summary> /// <param name="request">HttpRequest in which to look for a context token</param> /// <returns>The context token string</returns> public static string GetContextTokenFromRequest(HttpRequestBase request) { string[] paramNames = { "AppContext", "AppContextToken", "AccessToken", "SPAppToken" }; foreach (string paramName in paramNames) { if (!string.IsNullOrEmpty(request.Form[paramName])) { return request.Form[paramName]; } if (!string.IsNullOrEmpty(request.QueryString[paramName])) { return request.QueryString[paramName]; } } return null; } /// <summary> /// Validate that a specified context token string is intended for this application based on the parameters /// specified in web.config. Parameters used from web.config used for validation include ClientId, /// HostedAppHostNameOverride, HostedAppHostName, ClientSecret, and Realm (if it is specified). If HostedAppHostNameOverride is present, /// it will be used for validation. Otherwise, if the <paramref name="appHostName"/> is not /// null, it is used for validation instead of the web.config's HostedAppHostName. If the token is invalid, an /// exception is thrown. 
If the token is valid, TokenHelper's static STS metadata url is updated based on the token contents /// and a JsonWebSecurityToken based on the context token is returned. /// </summary> /// <param name="contextTokenString">The context token to validate</param> /// <param name="appHostName">The URL authority, consisting of Domain Name System (DNS) host name or IP address and the port number, to use for token audience validation. /// If null, HostedAppHostName web.config setting is used instead. HostedAppHostNameOverride web.config setting, if present, will be used /// for validation instead of <paramref name="appHostName"/> .</param> /// <returns>A JsonWebSecurityToken based on the context token.</returns> public static SharePointContextToken ReadAndValidateContextToken(string contextTokenString, string appHostName = null) { JsonWebSecurityTokenHandler tokenHandler = CreateJsonWebSecurityTokenHandler(); SecurityToken securityToken = tokenHandler.ReadToken(contextTokenString); JsonWebSecurityToken jsonToken = securityToken as JsonWebSecurityToken; SharePointContextToken token = SharePointContextToken.Create(jsonToken); string stsAuthority = (new Uri(token.SecurityTokenServiceUri)).Authority; int firstDot = stsAuthority.IndexOf('.'); GlobalEndPointPrefix = stsAuthority.Substring(0, firstDot); AcsHostUrl = stsAuthority.Substring(firstDot + 1); tokenHandler.ValidateToken(jsonToken); string[] acceptableAudiences; if (!String.IsNullOrEmpty(HostedAppHostNameOverride)) { acceptableAudiences = HostedAppHostNameOverride.Split(';'); } else if (appHostName == null) { acceptableAudiences = new[] { HostedAppHostName }; } else { acceptableAudiences = new[] { appHostName }; } bool validationSuccessful = false; string realm = Realm ?? token.Realm; foreach (var audience in acceptableAudiences) { string principal = GetFormattedPrincipal(ClientId, audience, realm); if (StringComparer.OrdinalIgnoreCase.Equals(token.Audience, principal)) { validationSuccessful = true; break; } } if (!validationSuccessful) { throw new AudienceUriValidationFailedException( String.Format(CultureInfo.CurrentCulture, "\"{0}\" is not the intended audience \"{1}\"", String.Join(";", acceptableAudiences), token.Audience)); } return token; } /// <summary> /// Retrieves an access token from ACS to call the source of the specified context token at the specified /// targetHost. The targetHost must be registered for the principal that sent the context token. /// </summary> /// <param name="contextToken">Context token issued by the intended access token audience</param> /// <param name="targetHost">Url authority of the target principal</param> /// <returns>An access token with an audience matching the context token's source</returns> public static OAuth2AccessTokenResponse GetAccessToken(SharePointContextToken contextToken, string targetHost) { string targetPrincipalName = contextToken.TargetPrincipalName; // Extract the refreshToken from the context token string refreshToken = contextToken.RefreshToken; if (String.IsNullOrEmpty(refreshToken)) { return null; } string targetRealm = Realm ?? contextToken.Realm; return GetAccessToken(refreshToken, targetPrincipalName, targetHost, targetRealm); } /// <summary> /// Uses the specified authorization code to retrieve an access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary> /// <param name="authorizationCode">Authorization code to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string authorizationCode, string targetPrincipalName, string targetHost, string targetRealm, Uri redirectUri) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); // Create request for token. The RedirectUri is null here. This will fail if redirect uri is registered OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithAuthorizationCode( clientId, ClientSecret, authorizationCode, redirectUri, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Uses the specified refresh token to retrieve an access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. /// </summary> /// <param name="refreshToken">Refresh token to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string refreshToken, string targetPrincipalName, string targetHost, string targetRealm) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithRefreshToken(clientId, ClientSecret, refreshToken, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Retrieves an app-only access token from ACS to call the specified principal /// at the specified targetHost. 
The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. /// </summary> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAppOnlyAccessToken( string targetPrincipalName, string targetHost, string targetRealm) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, HostedAppHostName, targetRealm); OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithClientCredentials(clientId, ClientSecret, resource); oauth2Request.Resource = resource; // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Creates a client context based on the properties of a remote event receiver /// </summary> /// <param name="properties">Properties of a remote event receiver</param> /// <returns>A ClientContext ready to call the web where the event originated</returns> public static ClientContext CreateRemoteEventReceiverClientContext(SPRemoteEventProperties properties) { Uri sharepointUrl; if (properties.ListEventProperties != null) { sharepointUrl = new Uri(properties.ListEventProperties.WebUrl); } else if (properties.ItemEventProperties != null) { sharepointUrl = new Uri(properties.ItemEventProperties.WebUrl); } else if (properties.WebEventProperties != null) { sharepointUrl = new Uri(properties.WebEventProperties.FullUrl); } else { return null; } if (IsHighTrustApp()) { return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null); } return CreateAcsClientContextForUrl(properties, sharepointUrl); } /// <summary> /// Creates a client context based on the properties of an app event /// </summary> /// <param name="properties">Properties of an app event</param> /// <param name="useAppWeb">True to target the app web, false to target the host web</param> /// <returns>A ClientContext ready to call the app web or the parent web</returns> public static ClientContext CreateAppEventClientContext(SPRemoteEventProperties properties, bool useAppWeb) { if (properties.AppEventProperties == null) { return null; } Uri sharepointUrl = useAppWeb ? 
properties.AppEventProperties.AppWebFullUrl : properties.AppEventProperties.HostWebFullUrl; if (IsHighTrustApp()) { return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null); } return CreateAcsClientContextForUrl(properties, sharepointUrl); } /// <summary> /// Retrieves an access token from ACS using the specified authorization code, and uses that access token to /// create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns> public static ClientContext GetClientContextWithAuthorizationCode( string targetUrl, string authorizationCode, Uri redirectUri) { return GetClientContextWithAuthorizationCode(targetUrl, SharePointPrincipal, authorizationCode, GetRealmFromTargetUrl(new Uri(targetUrl)), redirectUri); } /// <summary> /// Retrieves an access token from ACS using the specified authorization code, and uses that access token to /// create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="targetPrincipalName">Name of the target SharePoint principal</param> /// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns> public static ClientContext GetClientContextWithAuthorizationCode( string targetUrl, string targetPrincipalName, string authorizationCode, string targetRealm, Uri redirectUri) { Uri targetUri = new Uri(targetUrl); string accessToken = GetAccessToken(authorizationCode, targetPrincipalName, targetUri.Authority, targetRealm, redirectUri).AccessToken; return GetClientContextWithAccessToken(targetUrl, accessToken); } /// <summary> /// Uses the specified access token to create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="accessToken">Access token to be used when calling the specified targetUrl</param> /// <returns>A ClientContext ready to call targetUrl with the specified access token</returns> public static ClientContext GetClientContextWithAccessToken(string targetUrl, string accessToken) { ClientContext clientContext = new ClientContext(targetUrl); clientContext.AuthenticationMode = ClientAuthenticationMode.Anonymous; clientContext.FormDigestHandlingEnabled = false; clientContext.ExecutingWebRequest += delegate(object oSender, WebRequestEventArgs webRequestEventArgs) { webRequestEventArgs.WebRequestExecutor.RequestHeaders["Authorization"] = "Bearer " + accessToken; }; return clientContext; } /// <summary> /// Retrieves an access token from ACS using the specified context token, and uses that access token to create /// a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="contextTokenString">Context token received from the target SharePoint site</param> /// <param name="appHostUrl">Url authority of the hosted app. 
If this is null, the value in the HostedAppHostName /// of web.config will be used instead</param> /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns> public static ClientContext GetClientContextWithContextToken( string targetUrl, string contextTokenString, string appHostUrl) { SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, appHostUrl); Uri targetUri = new Uri(targetUrl); string accessToken = GetAccessToken(contextToken, targetUri.Authority).AccessToken; return GetClientContextWithAccessToken(targetUrl, accessToken); } /// <summary> /// Returns the SharePoint url to which the app should redirect the browser to request consent and get back /// an authorization code. /// </summary> /// <param name="contextUrl">Absolute Url of the SharePoint site</param> /// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format /// (e.g. "Web.Read Site.Write")</param> /// <returns>Url of the SharePoint site's OAuth authorization page</returns> public static string GetAuthorizationUrl(string contextUrl, string scope) { return string.Format( "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code", EnsureTrailingSlash(contextUrl), AuthorizationPage, ClientId, scope); } /// <summary> /// Returns the SharePoint url to which the app should redirect the browser to request consent and get back /// an authorization code. /// </summary> /// <param name="contextUrl">Absolute Url of the SharePoint site</param> /// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format /// (e.g. "Web.Read Site.Write")</param> /// <param name="redirectUri">Uri to which SharePoint should redirect the browser to after consent is /// granted</param> /// <returns>Url of the SharePoint site's OAuth authorization page</returns> public static string GetAuthorizationUrl(string contextUrl, string scope, string redirectUri) { return string.Format( "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code&redirect_uri={4}", EnsureTrailingSlash(contextUrl), AuthorizationPage, ClientId, scope, redirectUri); } /// <summary> /// Returns the SharePoint url to which the app should redirect the browser to request a new context token. /// </summary> /// <param name="contextUrl">Absolute Url of the SharePoint site</param> /// <param name="redirectUri">Uri to which SharePoint should redirect the browser to with a context token</param> /// <returns>Url of the SharePoint site's context token redirect page</returns> public static string GetAppContextTokenRequestUrl(string contextUrl, string redirectUri) { return string.Format( "{0}{1}?client_id={2}&redirect_uri={3}", EnsureTrailingSlash(contextUrl), RedirectPage, ClientId, redirectUri); } /// <summary> /// Retrieves an S2S access token signed by the application's private certificate on behalf of the specified /// WindowsIdentity and intended for the SharePoint at the targetApplicationUri. If no Realm is specified in /// web.config, an auth challenge will be issued to the targetApplicationUri to discover it. 
/// </summary> /// <param name="targetApplicationUri">Url of the target SharePoint site</param> /// <param name="identity">Windows identity of the user on whose behalf to create the access token</param> /// <returns>An access token with an audience of the target principal</returns> public static string GetS2SAccessTokenWithWindowsIdentity( Uri targetApplicationUri, WindowsIdentity identity) { string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm; JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null; return GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims); } /// <summary> /// Retrieves an S2S client context with an access token signed by the application's private certificate on /// behalf of the specified WindowsIdentity and intended for application at the targetApplicationUri using the /// targetRealm. If no Realm is specified in web.config, an auth challenge will be issued to the /// targetApplicationUri to discover it. /// </summary> /// <param name="targetApplicationUri">Url of the target SharePoint site</param> /// <param name="identity">Windows identity of the user on whose behalf to create the access token</param> /// <returns>A ClientContext using an access token with an audience of the target application</returns> public static ClientContext GetS2SClientContextWithWindowsIdentity( Uri targetApplicationUri, WindowsIdentity identity) { string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm; JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null; string accessToken = GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims); return GetClientContextWithAccessToken(targetApplicationUri.ToString(), accessToken); } /// <summary> /// Get authentication realm from SharePoint /// </summary> /// <param name="targetApplicationUri">Url of the target SharePoint site</param> /// <returns>String representation of the realm GUID</returns> public static string GetRealmFromTargetUrl(Uri targetApplicationUri) { WebRequest request = WebRequest.Create(targetApplicationUri + "/_vti_bin/client.svc"); request.Headers.Add("Authorization: Bearer "); try { using (request.GetResponse()) { } } catch (WebException e) { if (e.Response == null) { return null; } string bearerResponseHeader = e.Response.Headers["WWW-Authenticate"]; if (string.IsNullOrEmpty(bearerResponseHeader)) { return null; } const string bearer = "Bearer realm=\""; int bearerIndex = bearerResponseHeader.IndexOf(bearer, StringComparison.Ordinal); if (bearerIndex < 0) { return null; } int realmIndex = bearerIndex + bearer.Length; if (bearerResponseHeader.Length >= realmIndex + 36) { string targetRealm = bearerResponseHeader.Substring(realmIndex, 36); Guid realmGuid; if (Guid.TryParse(targetRealm, out realmGuid)) { return targetRealm; } } } return null; } /// <summary> /// Determines if this is a high trust app. /// </summary> /// <returns>True if this is a high trust app.</returns> public static bool IsHighTrustApp() { return SigningCredentials != null; } /// <summary> /// Ensures that the specified URL ends with '/' if it is not null or empty. 
/// </summary> /// <param name="url">The url.</param> /// <returns>The url ending with '/' if it is not null or empty.</returns> public static string EnsureTrailingSlash(string url) { if (!string.IsNullOrEmpty(url) && url[url.Length - 1] != '/') { return url + "/"; } return url; } #endregion #region private fields // // Configuration Constants // private const string AuthorizationPage = "_layouts/15/OAuthAuthorize.aspx"; private const string RedirectPage = "_layouts/15/AppRedirect.aspx"; private const string AcsPrincipalName = "00000001-0000-0000-c000-000000000000"; private const string AcsMetadataEndPointRelativeUrl = "metadata/json/1"; private const string S2SProtocol = "OAuth2"; private const string DelegationIssuance = "DelegationIssuance1.0"; private const string NameIdentifierClaimType = JsonWebTokenConstants.ReservedClaims.NameIdentifier; private const string TrustedForImpersonationClaimType = "trustedfordelegation"; private const string ActorTokenClaimType = JsonWebTokenConstants.ReservedClaims.ActorToken; // // Environment Constants // private static string GlobalEndPointPrefix = "accounts"; private static string AcsHostUrl = "accesscontrol.windows.net"; // // Hosted app configuration // private static readonly string ClientId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientId")) ? WebConfigurationManager.AppSettings.Get("HostedAppName") : WebConfigurationManager.AppSettings.Get("ClientId"); private static readonly string IssuerId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("IssuerId")) ? ClientId : WebConfigurationManager.AppSettings.Get("IssuerId"); private static readonly string HostedAppHostNameOverride = WebConfigurationManager.AppSettings.Get("HostedAppHostNameOverride"); private static readonly string HostedAppHostName = WebConfigurationManager.AppSettings.Get("HostedAppHostName"); private static readonly string ClientSecret = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientSecret")) ? WebConfigurationManager.AppSettings.Get("HostedAppSigningKey") : WebConfigurationManager.AppSettings.Get("ClientSecret"); private static readonly string SecondaryClientSecret = WebConfigurationManager.AppSettings.Get("SecondaryClientSecret"); private static readonly string Realm = WebConfigurationManager.AppSettings.Get("Realm"); private static readonly string ServiceNamespace = WebConfigurationManager.AppSettings.Get("Realm"); private static readonly string ClientSigningCertificatePath = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePath"); private static readonly string ClientSigningCertificatePassword = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePassword"); private static readonly X509Certificate2 ClientCertificate = (string.IsNullOrEmpty(ClientSigningCertificatePath) || string.IsNullOrEmpty(ClientSigningCertificatePassword)) ? null : new X509Certificate2(ClientSigningCertificatePath, ClientSigningCertificatePassword); private static readonly X509SigningCredentials SigningCredentials = (ClientCertificate == null) ? 
null : new X509SigningCredentials(ClientCertificate, SecurityAlgorithms.RsaSha256Signature, SecurityAlgorithms.Sha256Digest); #endregion #region private methods private static ClientContext CreateAcsClientContextForUrl(SPRemoteEventProperties properties, Uri sharepointUrl) { string contextTokenString = properties.ContextToken; if (String.IsNullOrEmpty(contextTokenString)) { return null; } SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, OperationContext.Current.IncomingMessageHeaders.To.Host); string accessToken = GetAccessToken(contextToken, sharepointUrl.Authority).AccessToken; return GetClientContextWithAccessToken(sharepointUrl.ToString(), accessToken); } private static string GetAcsMetadataEndpointUrl() { return Path.Combine(GetAcsGlobalEndpointUrl(), AcsMetadataEndPointRelativeUrl); } private static string GetFormattedPrincipal(string principalName, string hostName, string realm) { if (!String.IsNullOrEmpty(hostName)) { return String.Format(CultureInfo.InvariantCulture, "{0}/{1}@{2}", principalName, hostName, realm); } return String.Format(CultureInfo.InvariantCulture, "{0}@{1}", principalName, realm); } private static string GetAcsPrincipalName(string realm) { return GetFormattedPrincipal(AcsPrincipalName, new Uri(GetAcsGlobalEndpointUrl()).Host, realm); } private static string GetAcsGlobalEndpointUrl() { return String.Format(CultureInfo.InvariantCulture, "https://{0}.{1}/", GlobalEndPointPrefix, AcsHostUrl); } private static JsonWebSecurityTokenHandler CreateJsonWebSecurityTokenHandler() { JsonWebSecurityTokenHandler handler = new JsonWebSecurityTokenHandler(); handler.Configuration = new SecurityTokenHandlerConfiguration(); handler.Configuration.AudienceRestriction = new AudienceRestriction(AudienceUriMode.Never); handler.Configuration.CertificateValidator = X509CertificateValidator.None; List<byte[]> securityKeys = new List<byte[]>(); securityKeys.Add(Convert.FromBase64String(ClientSecret)); if (!string.IsNullOrEmpty(SecondaryClientSecret)) { securityKeys.Add(Convert.FromBase64String(SecondaryClientSecret)); } List<SecurityToken> securityTokens = new List<SecurityToken>(); securityTokens.Add(new MultipleSymmetricKeySecurityToken(securityKeys)); handler.Configuration.IssuerTokenResolver = SecurityTokenResolver.CreateDefaultSecurityTokenResolver( new ReadOnlyCollection<SecurityToken>(securityTokens), false); SymmetricKeyIssuerNameRegistry issuerNameRegistry = new SymmetricKeyIssuerNameRegistry(); foreach (byte[] securitykey in securityKeys) { issuerNameRegistry.AddTrustedIssuer(securitykey, GetAcsPrincipalName(ServiceNamespace)); } handler.Configuration.IssuerNameRegistry = issuerNameRegistry; return handler; } private static string GetS2SAccessTokenWithClaims( string targetApplicationHostName, string targetRealm, IEnumerable<JsonWebTokenClaim> claims) { return IssueToken( ClientId, IssuerId, targetRealm, SharePointPrincipal, targetRealm, targetApplicationHostName, true, claims, claims == null); } private static JsonWebTokenClaim[] GetClaimsWithWindowsIdentity(WindowsIdentity identity) { JsonWebTokenClaim[] claims = new JsonWebTokenClaim[] { new JsonWebTokenClaim(NameIdentifierClaimType, identity.User.Value.ToLower()), new JsonWebTokenClaim("nii", "urn:office:idp:activedirectory") }; return claims; } private static string IssueToken( string sourceApplication, string issuerApplication, string sourceRealm, string targetApplication, string targetRealm, string targetApplicationHostName, bool trustedForDelegation, IEnumerable<JsonWebTokenClaim> 
claims, bool appOnly = false) { if (null == SigningCredentials) { throw new InvalidOperationException("SigningCredentials was not initialized"); } #region Actor token string issuer = string.IsNullOrEmpty(sourceRealm) ? issuerApplication : string.Format("{0}@{1}", issuerApplication, sourceRealm); string nameid = string.IsNullOrEmpty(sourceRealm) ? sourceApplication : string.Format("{0}@{1}", sourceApplication, sourceRealm); string audience = string.Format("{0}/{1}@{2}", targetApplication, targetApplicationHostName, targetRealm); List<JsonWebTokenClaim> actorClaims = new List<JsonWebTokenClaim>(); actorClaims.Add(new JsonWebTokenClaim(JsonWebTokenConstants.ReservedClaims.NameIdentifier, nameid)); if (trustedForDelegation && !appOnly) { actorClaims.Add(new JsonWebTokenClaim(TrustedForImpersonationClaimType, "true")); } // Create token JsonWebSecurityToken actorToken = new JsonWebSecurityToken( issuer: issuer, audience: audience, validFrom: DateTime.UtcNow, validTo: DateTime.UtcNow.Add(HighTrustAccessTokenLifetime), signingCredentials: SigningCredentials, claims: actorClaims); string actorTokenString = new JsonWebSecurityTokenHandler().WriteTokenAsString(actorToken); if (appOnly) { // App-only token is the same as actor token for delegated case return actorTokenString; } #endregion Actor token #region Outer token List<JsonWebTokenClaim> outerClaims = null == claims ? new List<JsonWebTokenClaim>() : new List<JsonWebTokenClaim>(claims); outerClaims.Add(new JsonWebTokenClaim(ActorTokenClaimType, actorTokenString)); JsonWebSecurityToken jsonToken = new JsonWebSecurityToken( nameid, // outer token issuer should match actor token nameid audience, DateTime.UtcNow, DateTime.UtcNow.Add(HighTrustAccessTokenLifetime), outerClaims); string accessToken = new JsonWebSecurityTokenHandler().WriteTokenAsString(jsonToken); #endregion Outer token return accessToken; } #endregion #region AcsMetadataParser // This class is used to get MetaData document from the global STS endpoint. It contains // methods to parse the MetaData document and get endpoints and STS certificate. 
public static class AcsMetadataParser { public static X509Certificate2 GetAcsSigningCert(string realm) { JsonMetadataDocument document = GetMetadataDocument(realm); if (null != document.keys && document.keys.Count > 0) { JsonKey signingKey = document.keys[0]; if (null != signingKey && null != signingKey.keyValue) { return new X509Certificate2(Encoding.UTF8.GetBytes(signingKey.keyValue.value)); } } throw new Exception("Metadata document does not contain ACS signing certificate."); } public static string GetDelegationServiceUrl(string realm) { JsonMetadataDocument document = GetMetadataDocument(realm); JsonEndpoint delegationEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == DelegationIssuance); if (null != delegationEndpoint) { return delegationEndpoint.location; } throw new Exception("Metadata document does not contain Delegation Service endpoint Url"); } private static JsonMetadataDocument GetMetadataDocument(string realm) { string acsMetadataEndpointUrlWithRealm = String.Format(CultureInfo.InvariantCulture, "{0}?realm={1}", GetAcsMetadataEndpointUrl(), realm); byte[] acsMetadata; using (WebClient webClient = new WebClient()) { acsMetadata = webClient.DownloadData(acsMetadataEndpointUrlWithRealm); } string jsonResponseString = Encoding.UTF8.GetString(acsMetadata); JavaScriptSerializer serializer = new JavaScriptSerializer(); JsonMetadataDocument document = serializer.Deserialize<JsonMetadataDocument>(jsonResponseString); if (null == document) { throw new Exception("No metadata document found at the global endpoint " + acsMetadataEndpointUrlWithRealm); } return document; } public static string GetStsUrl(string realm) { JsonMetadataDocument document = GetMetadataDocument(realm); JsonEndpoint s2sEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == S2SProtocol); if (null != s2sEndpoint) { return s2sEndpoint.location; } throw new Exception("Metadata document does not contain STS endpoint url"); } private class JsonMetadataDocument { public string serviceName { get; set; } public List<JsonEndpoint> endpoints { get; set; } public List<JsonKey> keys { get; set; } } private class JsonEndpoint { public string location { get; set; } public string protocol { get; set; } public string usage { get; set; } } private class JsonKeyValue { public string type { get; set; } public string value { get; set; } } private class JsonKey { public string usage { get; set; } public JsonKeyValue keyValue { get; set; } } } #endregion } /// <summary> /// A JsonWebSecurityToken generated by SharePoint to authenticate to a 3rd party application and allow callbacks using a refresh token /// </summary> public class SharePointContextToken : JsonWebSecurityToken { public static SharePointContextToken Create(JsonWebSecurityToken contextToken) { return new SharePointContextToken(contextToken.Issuer, contextToken.Audience, contextToken.ValidFrom, contextToken.ValidTo, contextToken.Claims); } public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims) : base(issuer, audience, validFrom, validTo, claims) { } public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SecurityToken issuerToken, JsonWebSecurityToken actorToken) : base(issuer, audience, validFrom, validTo, claims, issuerToken, actorToken) { } public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, 
SigningCredentials signingCredentials) : base(issuer, audience, validFrom, validTo, claims, signingCredentials) { } public string NameId { get { return GetClaimValue(this, "nameid"); } } /// <summary> /// The principal name portion of the context token's "appctxsender" claim /// </summary> public string TargetPrincipalName { get { string appctxsender = GetClaimValue(this, "appctxsender"); if (appctxsender == null) { return null; } return appctxsender.Split('@')[0]; } } /// <summary> /// The context token's "refreshtoken" claim /// </summary> public string RefreshToken { get { return GetClaimValue(this, "refreshtoken"); } } /// <summary> /// The context token's "CacheKey" claim /// </summary> public string CacheKey { get { string appctx = GetClaimValue(this, "appctx"); if (appctx == null) { return null; } ClientContext ctx = new ClientContext("http://tempuri.org"); Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx); string cacheKey = (string)dict["CacheKey"]; return cacheKey; } } /// <summary> /// The context token's "SecurityTokenServiceUri" claim /// </summary> public string SecurityTokenServiceUri { get { string appctx = GetClaimValue(this, "appctx"); if (appctx == null) { return null; } ClientContext ctx = new ClientContext("http://tempuri.org"); Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx); string securityTokenServiceUri = (string)dict["SecurityTokenServiceUri"]; return securityTokenServiceUri; } } /// <summary> /// The realm portion of the context token's "audience" claim /// </summary> public string Realm { get { string aud = Audience; if (aud == null) { return null; } string tokenRealm = aud.Substring(aud.IndexOf('@') + 1); return tokenRealm; } } private static string GetClaimValue(JsonWebSecurityToken token, string claimType) { if (token == null) { throw new ArgumentNullException("token"); } foreach (JsonWebTokenClaim claim in token.Claims) { if (StringComparer.Ordinal.Equals(claim.ClaimType, claimType)) { return claim.Value; } } return null; } } /// <summary> /// Represents a security token which contains multiple security keys that are generated using symmetric algorithms. /// </summary> public class MultipleSymmetricKeySecurityToken : SecurityToken { /// <summary> /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class. /// </summary> /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param> public MultipleSymmetricKeySecurityToken(IEnumerable<byte[]> keys) : this(UniqueId.CreateUniqueId(), keys) { } /// <summary> /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class. /// </summary> /// <param name="tokenId">The unique identifier of the security token.</param> /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param> public MultipleSymmetricKeySecurityToken(string tokenId, IEnumerable<byte[]> keys) { if (keys == null) { throw new ArgumentNullException("keys"); } if (String.IsNullOrEmpty(tokenId)) { throw new ArgumentException("Value cannot be a null or empty string.", "tokenId"); } foreach (byte[] key in keys) { if (key.Length <= 0) { throw new ArgumentException("The key length must be greater then zero.", "keys"); } } id = tokenId; effectiveTime = DateTime.UtcNow; securityKeys = CreateSymmetricSecurityKeys(keys); } /// <summary> /// Gets the unique identifier of the security token. 
/// </summary> public override string Id { get { return id; } } /// <summary> /// Gets the cryptographic keys associated with the security token. /// </summary> public override ReadOnlyCollection<SecurityKey> SecurityKeys { get { return securityKeys.AsReadOnly(); } } /// <summary> /// Gets the first instant in time at which this security token is valid. /// </summary> public override DateTime ValidFrom { get { return effectiveTime; } } /// <summary> /// Gets the last instant in time at which this security token is valid. /// </summary> public override DateTime ValidTo { get { // Never expire return DateTime.MaxValue; } } /// <summary> /// Returns a value that indicates whether the key identifier for this instance can be resolved to the specified key identifier. /// </summary> /// <param name="keyIdentifierClause">A SecurityKeyIdentifierClause to compare to this instance</param> /// <returns>true if keyIdentifierClause is a SecurityKeyIdentifierClause and it has the same unique identifier as the Id property; otherwise, false.</returns> public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause) { if (keyIdentifierClause == null) { throw new ArgumentNullException("keyIdentifierClause"); } // Since this is a symmetric token and we do not have IDs to distinguish tokens, we just check for the // presence of a SymmetricIssuerKeyIdentifier. The actual mapping to the issuer takes place later // when the key is matched to the issuer. if (keyIdentifierClause is SymmetricIssuerKeyIdentifierClause) { return true; } return base.MatchesKeyIdentifierClause(keyIdentifierClause); } #region private members private List<SecurityKey> CreateSymmetricSecurityKeys(IEnumerable<byte[]> keys) { List<SecurityKey> symmetricKeys = new List<SecurityKey>(); foreach (byte[] key in keys) { symmetricKeys.Add(new InMemorySymmetricSecurityKey(key)); } return symmetricKeys; } private string id; private DateTime effectiveTime; private List<SecurityKey> securityKeys; #endregion } }
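// --------------------------------------------------------------------------------------
// Illustrative usage sketch (editor's addition, not part of the original TokenHelper).
// It strings together the low-trust (ACS) helpers defined above: read the context token
// SharePoint posted to the page, validate it, exchange it for an access token and build
// a ClientContext. "SPHostUrl" and Request.Url.Authority are the usual inputs for a
// provider-hosted add-in page; the class and method names below are hypothetical.
// --------------------------------------------------------------------------------------
namespace Core.JavaScriptInjection.WeekNumbersWeb
{
    using System.Web;
    using Microsoft.SharePoint.Client;

    internal static class TokenHelperUsageSketch
    {
        // Hypothetical helper, e.g. called from a page's Page_Load.
        public static string GetHostWebTitle(HttpRequest request)
        {
            // SharePoint posts the context token under one of the well-known parameter names.
            string contextTokenString = TokenHelper.GetContextTokenFromRequest(request);

            // The host web URL arrives on the query string of the add-in page.
            string sharePointUrl = request.QueryString["SPHostUrl"];

            // Validates the token, fetches an ACS access token and wires it into a ClientContext.
            using (ClientContext clientContext = TokenHelper.GetClientContextWithContextToken(
                sharePointUrl, contextTokenString, request.Url.Authority))
            {
                clientContext.Load(clientContext.Web);
                clientContext.ExecuteQuery();
                return clientContext.Web.Title;
            }
        }
    }
}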
using UnityEngine; using UnityEditor; using System; using System.Linq; using System.IO; using System.Collections.Generic; using System.Text.RegularExpressions; #if UNITY_5_5_OR_NEWER using UnityEngine.Profiling; #endif using UnityEngine.AssetBundles.GraphTool; using Model=UnityEngine.AssetBundles.GraphTool.DataModel.Version2; namespace UnityEngine.AssetBundles.GraphTool { [CustomNode("Configure Bundle/Extract Shared Assets", 71)] public class ExtractSharedAssets : Node { enum GroupingType : int { ByFileSize, ByRuntimeMemorySize }; [SerializeField] private string m_bundleNameTemplate; [SerializeField] private SerializableMultiTargetInt m_groupExtractedAssets; [SerializeField] private SerializableMultiTargetInt m_groupSizeByte; [SerializeField] private SerializableMultiTargetInt m_groupingType; public override string ActiveStyle { get { return "node 3 on"; } } public override string InactiveStyle { get { return "node 3"; } } public override string Category { get { return "Configure"; } } public override Model.NodeOutputSemantics NodeInputType { get { return Model.NodeOutputSemantics.AssetBundleConfigurations; } } public override Model.NodeOutputSemantics NodeOutputType { get { return Model.NodeOutputSemantics.AssetBundleConfigurations; } } public override void Initialize(Model.NodeData data) { m_bundleNameTemplate = "shared_*"; m_groupExtractedAssets = new SerializableMultiTargetInt(); m_groupSizeByte = new SerializableMultiTargetInt(); m_groupingType = new SerializableMultiTargetInt(); data.AddDefaultInputPoint(); data.AddDefaultOutputPoint(); } public override Node Clone(Model.NodeData newData) { var newNode = new ExtractSharedAssets(); newNode.m_groupExtractedAssets = new SerializableMultiTargetInt(m_groupExtractedAssets); newNode.m_groupSizeByte = new SerializableMultiTargetInt(m_groupSizeByte); newNode.m_groupingType = new SerializableMultiTargetInt(m_groupingType); newNode.m_bundleNameTemplate = m_bundleNameTemplate; newData.AddDefaultInputPoint(); newData.AddDefaultOutputPoint(); return newNode; } public override void OnInspectorGUI(NodeGUI node, AssetReferenceStreamManager streamManager, NodeGUIEditor editor, Action onValueChanged) { EditorGUILayout.HelpBox("Extract Shared Assets: Extract shared assets between asset bundles and add bundle configurations.", MessageType.Info); editor.UpdateNodeName(node); GUILayout.Space(10f); var newValue = EditorGUILayout.TextField("Bundle Name Template", m_bundleNameTemplate); if(newValue != m_bundleNameTemplate) { using(new RecordUndoScope("Bundle Name Template Change", node, true)) { m_bundleNameTemplate = newValue; onValueChanged(); } } GUILayout.Space(10f); //Show target configuration tab editor.DrawPlatformSelector(node); using (new EditorGUILayout.VerticalScope(GUI.skin.box)) { var disabledScope = editor.DrawOverrideTargetToggle(node, m_groupSizeByte.ContainsValueOf(editor.CurrentEditingGroup), (bool enabled) => { using(new RecordUndoScope("Remove Target Grouping Size Settings", node, true)){ if(enabled) { m_groupExtractedAssets[editor.CurrentEditingGroup] = m_groupExtractedAssets.DefaultValue; m_groupSizeByte[editor.CurrentEditingGroup] = m_groupSizeByte.DefaultValue; m_groupingType[editor.CurrentEditingGroup] = m_groupingType.DefaultValue; } else { m_groupExtractedAssets.Remove(editor.CurrentEditingGroup); m_groupSizeByte.Remove(editor.CurrentEditingGroup); m_groupingType.Remove(editor.CurrentEditingGroup); } onValueChanged(); } }); using (disabledScope) { var useGroup = EditorGUILayout.ToggleLeft ("Subgroup shared assets by size", 
m_groupExtractedAssets [editor.CurrentEditingGroup] != 0); if (useGroup != (m_groupExtractedAssets [editor.CurrentEditingGroup] != 0)) { using(new RecordUndoScope("Change Grouping Type", node, true)){ m_groupExtractedAssets[editor.CurrentEditingGroup] = (useGroup)? 1:0; onValueChanged(); } } using (new EditorGUI.DisabledScope (!useGroup)) { var newType = (GroupingType)EditorGUILayout.EnumPopup("Grouping Type",(GroupingType)m_groupingType[editor.CurrentEditingGroup]); if (newType != (GroupingType)m_groupingType[editor.CurrentEditingGroup]) { using(new RecordUndoScope("Change Grouping Type", node, true)){ m_groupingType[editor.CurrentEditingGroup] = (int)newType; onValueChanged(); } } var newSizeText = EditorGUILayout.TextField("Size(KB)",m_groupSizeByte[editor.CurrentEditingGroup].ToString()); int newSize = 0; Int32.TryParse (newSizeText, out newSize); if (newSize != m_groupSizeByte[editor.CurrentEditingGroup]) { using(new RecordUndoScope("Change Grouping Size", node, true)){ m_groupSizeByte[editor.CurrentEditingGroup] = newSize; onValueChanged(); } } } } } EditorGUILayout.HelpBox("Bundle Name Template replaces \'*\' with number.", MessageType.Info); } /** * Prepare is called whenever graph needs update. */ public override void Prepare (BuildTarget target, Model.NodeData node, IEnumerable<PerformGraph.AssetGroups> incoming, IEnumerable<Model.ConnectionData> connectionsToOutput, PerformGraph.Output Output) { if(string.IsNullOrEmpty(m_bundleNameTemplate)) { throw new NodeException(node.Name + ":Bundle Name Template is empty.", node.Id); } if (m_groupExtractedAssets [target] != 0) { if(m_groupSizeByte[target] < 0) { throw new NodeException("Invalid size. Size property must be a positive number.", node.Id); } } // Pass incoming assets straight to Output if(Output != null) { var destination = (connectionsToOutput == null || !connectionsToOutput.Any())? 
null : connectionsToOutput.First(); if(incoming != null) { var buildMap = AssetBundleBuildMap.GetBuildMap (); buildMap.ClearFromId (node.Id); var dependencyCollector = new Dictionary<string, List<string>>(); // [asset path:group name] var sharedDependency = new Dictionary<string, List<AssetReference>>(); var groupNameMap = new Dictionary<string, string>(); // build dependency map foreach(var ag in incoming) { foreach (var key in ag.assetGroups.Keys) { var assets = ag.assetGroups[key]; foreach(var a in assets) { CollectDependencies(key, new string[] { a.importFrom }, ref dependencyCollector); } } } foreach(var entry in dependencyCollector) { if(entry.Value != null && entry.Value.Count > 1) { var joinedName = string.Join("-", entry.Value.ToArray()); if(!groupNameMap.ContainsKey(joinedName)) { var count = groupNameMap.Count; var newName = m_bundleNameTemplate.Replace("*", count.ToString()); if(newName == m_bundleNameTemplate) { newName = m_bundleNameTemplate + count.ToString(); } groupNameMap.Add(joinedName, newName); } var groupName = groupNameMap[joinedName]; if(!sharedDependency.ContainsKey(groupName)) { sharedDependency[groupName] = new List<AssetReference>(); } sharedDependency[groupName].Add( AssetReference.CreateReference(entry.Key) ); } } if(sharedDependency.Keys.Count > 0) { // subgroup shared dependency bundles by size if (m_groupExtractedAssets [target] != 0) { List<string> dividingBundleNames = new List<string> (sharedDependency.Keys); long szGroup = m_groupSizeByte[target] * 1000; foreach(var bundleName in dividingBundleNames) { var assets = sharedDependency[bundleName]; int groupCount = 0; long szGroupCount = 0; foreach(var a in assets) { var subGroupName = string.Format ("{0}_{1}", bundleName, groupCount); if (!sharedDependency.ContainsKey(subGroupName)) { sharedDependency[subGroupName] = new List<AssetReference>(); } sharedDependency[subGroupName].Add(a); szGroupCount += GetSizeOfAsset(a, (GroupingType)m_groupingType[target]); if(szGroupCount >= szGroup) { szGroupCount = 0; ++groupCount; } } sharedDependency.Remove (bundleName); } } foreach(var bundleName in sharedDependency.Keys) { var bundleConfig = buildMap.GetAssetBundleWithNameAndVariant (node.Id, bundleName, string.Empty); bundleConfig.AddAssets (node.Id, sharedDependency[bundleName].Select(a => a.importFrom)); } foreach(var ag in incoming) { Output(destination, new Dictionary<string, List<AssetReference>>(ag.assetGroups)); } Output(destination, sharedDependency); } else { foreach(var ag in incoming) { Output(destination, ag.assetGroups); } } } else { // Overwrite output with an empty Dictionary when there is no incoming asset Output(destination, new Dictionary<string, List<AssetReference>>()); } } } private void CollectDependencies(string groupKey, string[] assetPaths, ref Dictionary<string, List<string>> collector) { var dependencies = AssetDatabase.GetDependencies(assetPaths); foreach(var d in dependencies) { // AssetBundles must not include script assets if (TypeUtility.GetTypeOfAsset (d) == typeof(MonoScript)) { continue; } if(!collector.ContainsKey(d)) { collector[d] = new List<string>(); } if(!collector[d].Contains(groupKey)) { collector[d].Add(groupKey); collector[d].Sort(); } } } private long GetSizeOfAsset(AssetReference a, GroupingType t) { long size = 0; // Scene assets cannot be loaded to estimate runtime memory size, so fall back to file size if (TypeUtility.GetTypeOfAsset (a.importFrom) == typeof(UnityEditor.SceneAsset)) { t = GroupingType.ByFileSize; } if (t == GroupingType.ByRuntimeMemorySize) { var objects = a.allData; foreach (var o in objects) { #if
UNITY_5_6_OR_NEWER size += Profiler.GetRuntimeMemorySizeLong (o); #else size += Profiler.GetRuntimeMemorySize(o); #endif } a.ReleaseData (); } else if (t == GroupingType.ByFileSize) { System.IO.FileInfo fileInfo = new System.IO.FileInfo(a.absolutePath); if (fileInfo.Exists) { size = fileInfo.Length; } } return size; } } }
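// --------------------------------------------------------------------------------------
// Editor's note (illustrative, not part of the original node): with the default template
// "shared_*", every asset referenced by more than one incoming group is pulled into a
// bundle named shared_0, shared_1, ... (one bundle per distinct set of referencing
// groups). When "Subgroup shared assets by size" is enabled, each of those bundles is
// further split into shared_0_0, shared_0_1, ... whenever the accumulated size (file
// size or runtime memory size, per the Grouping Type setting) reaches Size(KB) * 1000
// bytes, and the unsplit bundle name is dropped in favour of its subgroups.
// --------------------------------------------------------------------------------------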
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for Additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ using NPOI.SS.UserModel.Charts; using System; using NPOI.OpenXmlFormats.Dml.Chart; namespace NPOI.XSSF.UserModel.Charts { /** * Base class for all axis types. * * @author Roman Kashitsyn */ public abstract class XSSFChartAxis : IChartAxis { protected XSSFChart chart; private static double Min_LOG_BASE = 2.0; private static double Max_LOG_BASE = 1000.0; protected XSSFChartAxis(XSSFChart chart) { this.chart = chart; } public abstract long Id { get;} public abstract void CrossAxis(IChartAxis axis); public AxisPosition Position { get { return toAxisPosition(GetCTAxPos()); } set { GetCTAxPos().val = fromAxisPosition(value); } } public String NumberFormat { get { return GetCTNumFmt().formatCode; } set { GetCTNumFmt().formatCode = value; GetCTNumFmt().sourceLinked = true; } } public bool IsSetLogBase { get { return GetCTScaling().IsSetLogBase(); } } public double LogBase { get { CT_LogBase logBase = GetCTScaling().logBase; if (logBase != null) { return logBase.val; } return 0.0; } set { if (value < Min_LOG_BASE || Max_LOG_BASE < value) { throw new ArgumentException("Axis log base must be between 2 and 1000 (inclusive), got: " + value); } CT_Scaling scaling = GetCTScaling(); if (scaling.IsSetLogBase()) { scaling.logBase.val = value; } else { scaling.AddNewLogBase().val = (value); } } } public bool IsSetMinimum { get { return GetCTScaling().IsSetMin(); } } public double Minimum { get { CT_Scaling scaling = GetCTScaling(); if (scaling.IsSetMin()) { return scaling.min.val; } else { return 0.0; } } set { CT_Scaling scaling = GetCTScaling(); if (scaling.IsSetMin()) { scaling.min.val = value; } else { scaling.AddNewMin().val = value; } } } public bool IsSetMaximum { get { return GetCTScaling().IsSetMax(); } } public double Maximum { get { CT_Scaling scaling = GetCTScaling(); if (scaling.IsSetMax()) { return scaling.max.val; } else { return 0.0; } } set { CT_Scaling scaling = GetCTScaling(); if (scaling.IsSetMax()) { scaling.max.val = value; } else { scaling.AddNewMax().val = value; } } } public AxisOrientation Orientation { get { return toAxisOrientation(GetCTScaling().orientation); } set { CT_Scaling scaling = GetCTScaling(); ST_Orientation stOrientation = fromAxisOrientation(value); if (scaling.IsSetOrientation()) { scaling.orientation.val = stOrientation; } else { GetCTScaling().AddNewOrientation().val = stOrientation; } } } public AxisCrosses Crosses { get { return toAxisCrosses(GetCTCrosses()); } set { GetCTCrosses().val = fromAxisCrosses(value); } } public bool IsVisible { get { return GetDelete().val == 0; } set { GetDelete().val = value ? 
0 : 1; } } public AxisTickMark MajorTickMark { get { return toAxisTickMark(GetMajorCTTickMark()); } set { GetMajorCTTickMark().val = fromAxisTickMark(value); } } public AxisTickMark MinorTickMark { get { return toAxisTickMark(GetMinorCTTickMark()); } set { GetMinorCTTickMark().val = fromAxisTickMark(value); } } protected abstract CT_AxPos GetCTAxPos(); protected abstract CT_NumFmt GetCTNumFmt(); protected abstract CT_Scaling GetCTScaling(); protected abstract CT_Crosses GetCTCrosses(); protected abstract CT_Boolean GetDelete(); protected abstract CT_TickMark GetMajorCTTickMark(); protected abstract CT_TickMark GetMinorCTTickMark(); private static ST_Orientation fromAxisOrientation(AxisOrientation orientation) { switch (orientation) { case AxisOrientation.MinToMax: return ST_Orientation.minMax; case AxisOrientation.MaxToMin: return ST_Orientation.maxMin; default: throw new ArgumentException(); } } private static AxisOrientation toAxisOrientation(CT_Orientation ctOrientation) { switch (ctOrientation.val) { case ST_Orientation.minMax: return AxisOrientation.MinToMax; case ST_Orientation.maxMin: return AxisOrientation.MaxToMin; default: throw new ArgumentException(); } } private static ST_Crosses fromAxisCrosses(AxisCrosses crosses) { switch (crosses) { case AxisCrosses.AutoZero: return ST_Crosses.autoZero; case AxisCrosses.Min: return ST_Crosses.min; case AxisCrosses.Max: return ST_Crosses.max; default: throw new ArgumentException(); } } private static AxisCrosses toAxisCrosses(CT_Crosses ctCrosses) { switch (ctCrosses.val) { case ST_Crosses.autoZero: return AxisCrosses.AutoZero; case ST_Crosses.max: return AxisCrosses.Max; case ST_Crosses.min: return AxisCrosses.Min; default: throw new ArgumentException(); } } private static ST_AxPos fromAxisPosition(AxisPosition position) { switch (position) { case AxisPosition.Bottom: return ST_AxPos.b; case AxisPosition.Left: return ST_AxPos.l; case AxisPosition.Right: return ST_AxPos.r; case AxisPosition.Top: return ST_AxPos.t; default: throw new ArgumentException(); } } private static AxisPosition toAxisPosition(CT_AxPos ctAxPos) { switch (ctAxPos.val) { case ST_AxPos.b: return AxisPosition.Bottom; case ST_AxPos.l: return AxisPosition.Left; case ST_AxPos.r: return AxisPosition.Right; case ST_AxPos.t: return AxisPosition.Top; default: return AxisPosition.Bottom; } } private static ST_TickMark fromAxisTickMark(AxisTickMark tickMark) { switch (tickMark) { case AxisTickMark.None: return ST_TickMark.none; case AxisTickMark.In: return ST_TickMark.@in; case AxisTickMark.Out: return ST_TickMark.@out; case AxisTickMark.Cross: return ST_TickMark.cross; default: throw new ArgumentException("Unknown AxisTickMark: " + tickMark); } } private static AxisTickMark toAxisTickMark(CT_TickMark ctTickMark) { switch (ctTickMark.val) { case ST_TickMark.none: return AxisTickMark.None; case ST_TickMark.@in: return AxisTickMark.In; case ST_TickMark.@out: return AxisTickMark.Out; case ST_TickMark.cross: return AxisTickMark.Cross; default: return AxisTickMark.Cross; } } } }
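/* ====================================================================
   Illustrative usage sketch (editor's addition, not part of NPOI): shows
   how the axis properties implemented above are typically driven. How the
   concrete XSSFChartAxis instance is obtained (normally through the
   chart's axis factory) is outside this file, so it is taken as a
   parameter here and the helper name is hypothetical.
==================================================================== */
namespace NPOI.XSSF.UserModel.Charts
{
    using NPOI.SS.UserModel.Charts;

    internal static class XSSFChartAxisUsageSketch
    {
        public static void ConfigureValueAxis(XSSFChartAxis axis)
        {
            axis.Position = AxisPosition.Left;           // serialized as CT_AxPos "l"
            axis.Orientation = AxisOrientation.MinToMax;
            axis.Crosses = AxisCrosses.AutoZero;
            axis.NumberFormat = "0.00";                  // also marks the format as source-linked
            axis.MajorTickMark = AxisTickMark.Out;
            axis.Minimum = 0.0;
            axis.Maximum = 100.0;
            axis.LogBase = 10.0;                         // must lie in [2, 1000], otherwise ArgumentException
            axis.IsVisible = true;                       // writes 0 into the c:delete element
        }
    }
}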
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Threading; namespace Apache.Geode.Client.UnitTests { using NUnit.Framework; using Apache.Geode.DUnitFramework; using Apache.Geode.Client.Tests; using Apache.Geode.Client; [TestFixture] [Category("group2")] [Category("unicast_only")] [Category("generics")] public class ThinClientCqIRTests : ThinClientRegionSteps { #region Private members private UnitProcess m_client1; private UnitProcess m_client2; private static string[] QueryRegionNames = { "Portfolios", "Positions", "Portfolios2", "Portfolios3" }; private static string QERegionName = "Portfolios"; private static string CqName = "MyCq"; #endregion protected override ClientBase[] GetClients() { m_client1 = new UnitProcess(); m_client2 = new UnitProcess(); return new ClientBase[] { m_client1, m_client2 }; } [TestFixtureSetUp] public override void InitTests() { base.InitTests(); m_client1.Call(InitClient); m_client2.Call(InitClient); } [TearDown] public override void EndTest() { CacheHelper.StopJavaServers(); base.EndTest(); } public void InitClient() { CacheHelper.Init(); try { Serializable.RegisterTypeGeneric(Portfolio.CreateDeserializable); Serializable.RegisterTypeGeneric(Position.CreateDeserializable); } catch (IllegalStateException) { // ignore since we run multiple iterations for pool and non pool configs } } public void StepOne(string locators) { CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[0], true, true, null, locators, "__TESTPOOL1_", true); CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[1], true, true, null, locators, "__TESTPOOL1_", true); CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[2], true, true, null, locators, "__TESTPOOL1_", true); CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[3], true, true, null, locators, "__TESTPOOL1_", true); IRegion<object, object> region = CacheHelper.GetRegion<object, object>(QueryRegionNames[0]); Apache.Geode.Client.RegionAttributes<object, object> regattrs = region.Attributes; region.CreateSubRegion(QueryRegionNames[1], regattrs); } public void StepTwo() { IRegion<object, object> region0 = CacheHelper.GetRegion<object, object>(QueryRegionNames[0]); IRegion<object, object> subRegion0 = region0.GetSubRegion(QueryRegionNames[1]); IRegion<object, object> region1 = CacheHelper.GetRegion<object, object>(QueryRegionNames[1]); IRegion<object, object> region2 = CacheHelper.GetRegion<object, object>(QueryRegionNames[2]); IRegion<object, object> region3 = CacheHelper.GetRegion<object, object>(QueryRegionNames[3]); QueryHelper<object, object> qh = QueryHelper<object, object>.GetHelper(); Util.Log("SetSize {0}, NumSets {1}.", qh.PortfolioSetSize, qh.PortfolioNumSets); qh.PopulatePortfolioData(region0, 
qh.PortfolioSetSize, qh.PortfolioNumSets); qh.PopulatePositionData(subRegion0, qh.PortfolioSetSize, qh.PortfolioNumSets); qh.PopulatePositionData(region1, qh.PortfolioSetSize, qh.PortfolioNumSets); qh.PopulatePortfolioData(region2, qh.PortfolioSetSize, qh.PortfolioNumSets); qh.PopulatePortfolioData(region3, qh.PortfolioSetSize, qh.PortfolioNumSets); } public void StepTwoQT() { IRegion<object, object> region0 = CacheHelper.GetRegion<object, object>(QueryRegionNames[0]); IRegion<object, object> subRegion0 = region0.GetSubRegion(QueryRegionNames[1]); QueryHelper<object, object> qh = QueryHelper<object, object>.GetHelper(); qh.PopulatePortfolioData(region0, 100, 20, 100); qh.PopulatePositionData(subRegion0, 100, 20); } public void StepOneQE(string locators) { CacheHelper.CreateTCRegion_Pool<object, object>(QERegionName, true, true, null, locators, "__TESTPOOL1_", true); IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(QERegionName); Portfolio p1 = new Portfolio(1, 100); Portfolio p2 = new Portfolio(2, 100); Portfolio p3 = new Portfolio(3, 100); Portfolio p4 = new Portfolio(4, 100); region["1"] = p1; region["2"] = p2; region["3"] = p3; region["4"] = p4; QueryService<object, object> qs = null; qs = PoolManager/*<object, object>*/.Find("__TESTPOOL1_").GetQueryService<object, object>(); CqAttributesFactory<object, object> cqFac = new CqAttributesFactory<object, object>(); ICqListener<object, object> cqLstner = new MyCqListener<object, object>(); cqFac.AddCqListener(cqLstner); CqAttributes<object, object> cqAttr = cqFac.Create(); CqQuery<object, object> qry = qs.NewCq(CqName, "select * from /" + QERegionName + " p where p.ID!=2", cqAttr, false); ICqResults<object> results = qry.ExecuteWithInitialResults(); Thread.Sleep(18000); // sleep 0.3min to allow server c query to complete region["4"] = p1; region["3"] = p2; region["2"] = p3; region["1"] = p4; Thread.Sleep(18000); // sleep 0.3min to allow server c query to complete Util.Log("Results size {0}.", results.Size); SelectResultsIterator<object> iter = results.GetIterator(); while (iter.HasNext) { object item = iter.Next(); if (item != null) { Struct st = item as Struct; string key = st["key"] as string; Assert.IsNotNull(key, "key is null"); Portfolio port = st["value"] as Portfolio; if (port == null) { Position pos = st["value"] as Position; if (pos == null) { string cs = item as string; if (cs == null) { Assert.Fail("value is null"); Util.Log("Query got other/unknown object."); } else { Util.Log("Query got string : {0}.", cs); } } else { Util.Log("Query got Position object with secId {0}, shares {1}.", pos.SecId, pos.SharesOutstanding); } } else { Util.Log("Query got Portfolio object with ID {0}, pkid {1}.", port.ID, port.Pkid); } } } qry = qs.GetCq(CqName); qry.Stop(); qry.Close(); // Bring down the region region.GetLocalView().DestroyRegion(); } void runCqQueryIRTest() { CacheHelper.SetupJavaServers(true, "remotequeryN.xml"); CacheHelper.StartJavaLocator(1, "GFELOC"); Util.Log("Locator started"); CacheHelper.StartJavaServerWithLocators(1, "GFECS1", 1); Util.Log("Cacheserver 1 started."); m_client1.Call(StepOne, CacheHelper.Locators); Util.Log("StepOne complete."); m_client1.Call(StepTwo); Util.Log("StepTwo complete."); m_client1.Call(StepOneQE, CacheHelper.Locators); Util.Log("StepOne complete."); m_client1.Call(Close); CacheHelper.StopJavaServer(1); Util.Log("Cacheserver 1 stopped."); CacheHelper.StopJavaLocator(1); Util.Log("Locator stopped"); } [Test] public void CqQueryIRTest() { runCqQueryIRTest(); } } }
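// Condensed sketch of the continuous-query portion of the test above, kept in the same
// namespace so MyCqListener resolves as it does in StepOneQE. It assumes the client pool
// "__TESTPOOL1_" and the "Portfolios" region already exist (as set up by the test); the CQ
// name "SketchCq" is invented for the example. Only calls that appear in the test are used:
// NewCq, ExecuteWithInitialResults and the SelectResultsIterator returned by GetIterator().
namespace Apache.Geode.Client.UnitTests
{
  using System;
  using Apache.Geode.Client;

  internal static class CqInitialResultsSketch
  {
    internal static void RunOnce()
    {
      QueryService<object, object> qs =
        PoolManager.Find("__TESTPOOL1_").GetQueryService<object, object>();

      CqAttributesFactory<object, object> cqFac = new CqAttributesFactory<object, object>();
      cqFac.AddCqListener(new MyCqListener<object, object>()); // listener type used by the test above
      CqAttributes<object, object> cqAttr = cqFac.Create();

      // Register the CQ and fetch the rows that already satisfy the predicate.
      CqQuery<object, object> qry =
        qs.NewCq("SketchCq", "select * from /Portfolios p where p.ID != 2", cqAttr, false);
      ICqResults<object> results = qry.ExecuteWithInitialResults();

      SelectResultsIterator<object> iter = results.GetIterator();
      while (iter.HasNext)
      {
        // Each row is a Struct with "key" and "value" fields, as in StepOneQE.
        Struct st = iter.Next() as Struct;
        if (st != null)
        {
          Console.WriteLine("key = {0}", st["key"]);
        }
      }

      // Shut the query down, mirroring the end of StepOneQE.
      qry.Stop();
      qry.Close();
    }
  }
}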
using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Net; using System.Net.Http.Headers; using System.Reflection; using System.Runtime.CompilerServices; using System.ServiceModel; using System.ServiceModel.Channels; using System.Threading.Tasks; using System.Xml; using System.Xml.Serialization; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Http.Features; using Microsoft.AspNetCore.WebUtilities; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using SoapCore.Extensibility; using SoapCore.MessageEncoder; using SoapCore.Meta; using SoapCore.ServiceModel; namespace SoapCore { public class SoapEndpointMiddleware<T_MESSAGE> where T_MESSAGE : CustomMessage, new() { private readonly ILogger<SoapEndpointMiddleware<T_MESSAGE>> _logger; private readonly RequestDelegate _next; private readonly SoapOptions _options; private readonly ServiceDescription _service; private readonly StringComparison _pathComparisonStrategy; private readonly SoapMessageEncoder[] _messageEncoders; private readonly SerializerHelper _serializerHelper; [Obsolete] public SoapEndpointMiddleware(ILogger<SoapEndpointMiddleware<T_MESSAGE>> logger, RequestDelegate next, Type serviceType, string path, SoapEncoderOptions[] encoderOptions, SoapSerializer serializer, bool caseInsensitivePath, ISoapModelBounder soapModelBounder, Binding binding, bool httpGetEnabled, bool httpsGetEnabled) : this(logger, next, new SoapOptions() { ServiceType = serviceType, Path = path, EncoderOptions = encoderOptions ?? binding?.ToEncoderOptions(), SoapSerializer = serializer, CaseInsensitivePath = caseInsensitivePath, SoapModelBounder = soapModelBounder, UseBasicAuthentication = binding.HasBasicAuth(), HttpGetEnabled = httpGetEnabled, HttpsGetEnabled = httpsGetEnabled }) { } public SoapEndpointMiddleware(ILogger<SoapEndpointMiddleware<T_MESSAGE>> logger, RequestDelegate next, SoapOptions options) { _logger = logger; _next = next; _options = options; _serializerHelper = new SerializerHelper(options.SoapSerializer); _pathComparisonStrategy = options.CaseInsensitivePath ? 
StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal; _service = new ServiceDescription(options.ServiceType); if (options.EncoderOptions is null) { options.EncoderOptions = new[] { new SoapEncoderOptions() }; } _messageEncoders = new SoapMessageEncoder[options.EncoderOptions.Length]; for (var i = 0; i < options.EncoderOptions.Length; i++) { _messageEncoders[i] = new SoapMessageEncoder(options.EncoderOptions[i].MessageVersion, options.EncoderOptions[i].WriteEncoding, options.EncoderOptions[i].ReaderQuotas, options.OmitXmlDeclaration, options.IndentXml, options.CheckXmlCharacters, options.EncoderOptions[i].XmlNamespaceOverrides, options.EncoderOptions[i].BindingName, options.EncoderOptions[i].PortName); } } public async Task Invoke(HttpContext httpContext, IServiceProvider serviceProvider) { var trailPathTuner = serviceProvider.GetService<TrailingServicePathTuner>(); trailPathTuner?.ConvertPath(httpContext); if (httpContext.Request.Path.StartsWithSegments(_options.Path, _pathComparisonStrategy, out var remainingPath)) { if (httpContext.Request.Method?.ToLower() == "get") { // If GET is not enabled, either for HTTP or HTTPS, return a 403 instead of the WSDL if ((httpContext.Request.IsHttps && !_options.HttpsGetEnabled) || (!httpContext.Request.IsHttps && !_options.HttpGetEnabled)) { httpContext.Response.StatusCode = (int)HttpStatusCode.Forbidden; return; } } try { _logger.LogDebug("Received SOAP Request for {0} ({1} bytes)", httpContext.Request.Path, httpContext.Request.ContentLength ?? 0); if (httpContext.Request.Method?.ToLower() == "get") { if (!string.IsNullOrWhiteSpace(remainingPath)) { httpContext.Response.StatusCode = (int)HttpStatusCode.InternalServerError; await httpContext.Response.WriteAsync($"Service does not support \"{remainingPath}\""); } else if (httpContext.Request.Query.ContainsKey("xsd") && _options.WsdlFileOptions != null) { await ProcessXSD(httpContext); } else if (string.IsNullOrEmpty(httpContext.Request.ContentType) || httpContext.Request.Query.ContainsKey("wsdl")) { if (_options.WsdlFileOptions != null) { await ProcessMetaFromFile(httpContext); } else { await ProcessMeta(httpContext); } } } else { await ProcessOperation(httpContext, serviceProvider); } } catch (Exception ex) { _logger.LogCritical(ex, "An error occurred when trying to service a request on SOAP endpoint: {0}", httpContext.Request.Path); // Let's pass this up the middleware chain after we have logged this issue // and signaled the criticality of it throw; } } else { await _next(httpContext); } } #if !NETCOREAPP3_0_OR_GREATER private static Task WriteMessageAsync(SoapMessageEncoder messageEncoder, Message responseMessage, HttpContext httpContext) { return messageEncoder.WriteMessageAsync(responseMessage, httpContext.Response.Body); } #else private static Task WriteMessageAsync(SoapMessageEncoder messageEncoder, Message responseMessage, HttpContext httpContext) { return messageEncoder.WriteMessageAsync(responseMessage, httpContext.Response.BodyWriter); } #endif private static string TryGetMultipartBoundary(HttpRequest request) { var parsedContentType = MediaTypeHeaderValue.Parse(request.ContentType); if (parsedContentType.MediaType != "multipart/related") { return null; } var boundaryValue = parsedContentType.Parameters .FirstOrDefault(p => p.Name.Equals("boundary", StringComparison.OrdinalIgnoreCase)) ?.Value; if (string.IsNullOrWhiteSpace(boundaryValue)) { return null; } return boundaryValue.Trim('"'); } private async Task<Message> ReadMessageAsync(HttpContext httpContext, SoapMessageEncoder 
messageEncoder) { var boundary = TryGetMultipartBoundary(httpContext.Request); if (!string.IsNullOrWhiteSpace(boundary)) { var multipartReader = new MultipartReader(boundary, httpContext.Request.Body); while (true) { var multipartSection = await multipartReader.ReadNextSectionAsync(); if (multipartSection == null) { break; } if (messageEncoder.IsContentTypeSupported(multipartSection.ContentType, true) || messageEncoder.IsContentTypeSupported(multipartSection.ContentType, false)) { return await messageEncoder.ReadMessageAsync(multipartSection.Body, 0x10000, multipartSection.ContentType); } } } #if !NETCOREAPP3_0_OR_GREATER return await messageEncoder.ReadMessageAsync(httpContext.Request.Body, 0x10000, httpContext.Request.ContentType); #else return await messageEncoder.ReadMessageAsync(httpContext.Request.BodyReader, 0x10000, httpContext.Request.ContentType); #endif } private async Task ProcessMeta(HttpContext httpContext) { var baseUrl = httpContext.Request.Scheme + "://" + httpContext.Request.Host + httpContext.Request.PathBase + httpContext.Request.Path; var xmlNamespaceManager = GetXmlNamespaceManager(null); var bindingName = "BasicHttpBinding_" + _service.GeneralContract.Name; var bodyWriter = _options.SoapSerializer == SoapSerializer.XmlSerializer ? new MetaBodyWriter(_service, baseUrl, xmlNamespaceManager, bindingName, _messageEncoders.Select(me => new SoapBindingInfo(me.MessageVersion, me.BindingName, me.PortName)).ToArray()) : (BodyWriter)new MetaWCFBodyWriter(_service, baseUrl, bindingName, _options.UseBasicAuthentication); //assumption that you want soap12 if your service supports that var messageEncoder = _messageEncoders.FirstOrDefault(me => me.MessageVersion == MessageVersion.Soap12WSAddressing10 || me.MessageVersion == MessageVersion.Soap12WSAddressingAugust2004) ?? _messageEncoders[0]; using var responseMessage = new MetaMessage( Message.CreateMessage(messageEncoder.MessageVersion, null, bodyWriter), _service, GetXmlNamespaceManager(messageEncoder), bindingName, _options.UseBasicAuthentication); //we should use text/xml in the WSDL page for browser compatibility. httpContext.Response.ContentType = "text/xml;charset=UTF-8"; // _messageEncoders[0].ContentType; await WriteMessageAsync(messageEncoder, responseMessage, httpContext); } private async Task ProcessOperation(HttpContext httpContext, IServiceProvider serviceProvider) { // Get the encoder based on Content Type var messageEncoder = _messageEncoders.FirstOrDefault(me => me.IsContentTypeSupported(httpContext.Request.ContentType, true)) ?? _messageEncoders.FirstOrDefault(me => me.IsContentTypeSupported(httpContext.Request.ContentType, false)) ?? 
_messageEncoders[0]; Message requestMessage = null; Message responseMessage = null; try { //Get the message requestMessage = await ReadMessageAsync(httpContext, messageEncoder); var asyncMessageFilters = serviceProvider.GetServices<IAsyncMessageFilter>().ToArray(); foreach (var messageFilter in asyncMessageFilters) { await messageFilter.OnRequestExecuting(requestMessage); } var soapMessageProcessors = serviceProvider.GetServices<ISoapMessageProcessor>().ToArray(); var processorPipe = MakeProcessorPipe(soapMessageProcessors, httpContext, (requestMessage) => ProcessMessage(requestMessage, messageEncoder, asyncMessageFilters, httpContext, serviceProvider)); responseMessage = await processorPipe(requestMessage); } catch (Exception ex) { if (ex is TargetInvocationException targetInvocationException) { ex = targetInvocationException.InnerException; } responseMessage = CreateErrorResponseMessage(ex, StatusCodes.Status500InternalServerError, serviceProvider, requestMessage, messageEncoder, httpContext); } if (responseMessage != null) { await WriteMessageAsync(messageEncoder, responseMessage, httpContext); } } private Func<Message, Task<Message>> MakeProcessorPipe(ISoapMessageProcessor[] soapMessageProcessors, HttpContext httpContext, Func<Message, Task<Message>> processMessageFunc) { Func<Message, Task<Message>> MakeProcessorPipe(int i = 0) { if (i < soapMessageProcessors.Length) { return (requestMessage) => soapMessageProcessors[i].ProcessMessage(requestMessage, httpContext, MakeProcessorPipe(i + 1)); } else { return processMessageFunc; } } return MakeProcessorPipe(); } private async Task<Message> ProcessMessage(Message requestMessage, SoapMessageEncoder messageEncoder, IAsyncMessageFilter[] asyncMessageFilters, HttpContext httpContext, IServiceProvider serviceProvider) { Message responseMessage; var soapAction = HeadersHelper.GetSoapAction(httpContext, ref requestMessage); requestMessage.Headers.Action = soapAction; if (string.IsNullOrEmpty(soapAction)) { throw new ArgumentException($"Unable to handle request without a valid action parameter. 
Please supply a valid soap action."); } var messageInspector2s = serviceProvider.GetServices<IMessageInspector2>(); var correlationObjects2 = default(List<(IMessageInspector2 inspector, object correlationObject)>); correlationObjects2 = messageInspector2s.Select(mi => (inspector: mi, correlationObject: mi.AfterReceiveRequest(ref requestMessage, _service))).ToList(); // for getting soapaction and parameters in (optional) body // GetReaderAtBodyContents must not be called twice in one request XmlDictionaryReader reader = null; if (!requestMessage.IsEmpty) { reader = requestMessage.GetReaderAtBodyContents(); } try { var operation = _service.Operations.FirstOrDefault(o => o.SoapAction.Equals(soapAction, StringComparison.Ordinal) || o.Name.Equals(HeadersHelper.GetTrimmedSoapAction(soapAction), StringComparison.Ordinal) || soapAction.Equals(HeadersHelper.GetTrimmedSoapAction(o.Name), StringComparison.Ordinal)); if (operation == null) { operation = _service.Operations.FirstOrDefault(o => soapAction.Equals(HeadersHelper.GetTrimmedClearedSoapAction(o.SoapAction), StringComparison.Ordinal)); } if (operation == null) { throw new InvalidOperationException($"No operation found for specified action: {requestMessage.Headers.Action}"); } _logger.LogInformation("Request for operation {0}.{1} received", operation.Contract.Name, operation.Name); //Create an instance of the service class var serviceInstance = serviceProvider.GetRequiredService(_service.ServiceType); SetMessageHeadersToProperty(requestMessage, serviceInstance); // Get operation arguments from message var arguments = GetRequestArguments(requestMessage, reader, operation, httpContext); ExecuteFiltersAndTune(httpContext, serviceProvider, operation, arguments, serviceInstance); var invoker = serviceProvider.GetService<IOperationInvoker>() ?? 
new DefaultOperationInvoker(); var responseObject = await invoker.InvokeAsync(operation.DispatchMethod, serviceInstance, arguments); if (operation.IsOneWay) { httpContext.Response.StatusCode = (int)HttpStatusCode.Accepted; return null; } var resultOutDictionary = new Dictionary<string, object>(); foreach (var parameterInfo in operation.OutParameters) { resultOutDictionary[parameterInfo.Name] = arguments[parameterInfo.Index]; } responseMessage = CreateResponseMessage(operation, responseObject, resultOutDictionary, soapAction, requestMessage, messageEncoder); httpContext.Response.ContentType = httpContext.Request.ContentType; httpContext.Response.Headers["SOAPAction"] = responseMessage.Headers.Action; correlationObjects2.ForEach(mi => mi.inspector.BeforeSendReply(ref responseMessage, _service, mi.correlationObject)); } finally { reader?.Dispose(); } // Execute response message filters foreach (var messageFilter in asyncMessageFilters.Reverse()) { await messageFilter.OnResponseExecuting(responseMessage); } SetHttpResponse(httpContext, responseMessage); return responseMessage; } private Message CreateResponseMessage( OperationDescription operation, object responseObject, Dictionary<string, object> resultOutDictionary, string soapAction, Message requestMessage, SoapMessageEncoder soapMessageEncoder) { T_MESSAGE responseMessage; // Create response message var bodyWriter = new ServiceBodyWriter(_options.SoapSerializer, operation, responseObject, resultOutDictionary); var xmlNamespaceManager = GetXmlNamespaceManager(soapMessageEncoder); if (soapMessageEncoder.MessageVersion.Addressing == AddressingVersion.WSAddressing10) { responseMessage = new T_MESSAGE { Message = Message.CreateMessage(soapMessageEncoder.MessageVersion, soapAction, bodyWriter), AdditionalEnvelopeXmlnsAttributes = _options.AdditionalEnvelopeXmlnsAttributes, NamespaceManager = xmlNamespaceManager }; responseMessage.Headers.Action = operation.ReplyAction; responseMessage.Headers.RelatesTo = requestMessage.Headers.MessageId; responseMessage.Headers.To = requestMessage.Headers.ReplyTo?.Uri; } else { responseMessage = new T_MESSAGE { Message = Message.CreateMessage(soapMessageEncoder.MessageVersion, null, bodyWriter), AdditionalEnvelopeXmlnsAttributes = _options.AdditionalEnvelopeXmlnsAttributes, NamespaceManager = xmlNamespaceManager }; } if (responseObject != null) { var messageHeaderMembers = responseObject.GetType().GetMembersWithAttribute<MessageHeaderAttribute>(); foreach (var messageHeaderMember in messageHeaderMembers) { var messageHeaderAttribute = messageHeaderMember.GetCustomAttribute<MessageHeaderAttribute>(); responseMessage.Headers.Add(MessageHeader.CreateHeader(messageHeaderAttribute.Name ?? messageHeaderMember.Name, messageHeaderAttribute.Namespace ?? 
operation.Contract.Namespace, messageHeaderMember.GetPropertyOrFieldValue(responseObject), messageHeaderAttribute.MustUnderstand)); } } return responseMessage; } private void ExecuteFiltersAndTune(HttpContext httpContext, IServiceProvider serviceProvider, OperationDescription operation, object[] arguments, object serviceInstance) { // Execute model binding filters object modelBindingOutput = null; foreach (var modelBindingFilter in serviceProvider.GetServices<IModelBindingFilter>()) { foreach (var modelType in modelBindingFilter.ModelTypes) { foreach (var parameterInfo in operation.InParameters) { var arg = arguments[parameterInfo.Index]; if (arg != null && arg.GetType() == modelType) { modelBindingFilter.OnModelBound(arg, serviceProvider, out modelBindingOutput); } } } } // Execute Mvc ActionFilters foreach (var actionFilterAttr in operation.DispatchMethod.CustomAttributes.Where(a => a.AttributeType.Name == "ServiceFilterAttribute")) { var actionFilter = serviceProvider.GetService(actionFilterAttr.ConstructorArguments[0].Value as Type); actionFilter.GetType().GetMethod("OnSoapActionExecuting")?.Invoke(actionFilter, new[] { operation.Name, arguments, httpContext, modelBindingOutput }); } // Invoke OnModelBound _options.SoapModelBounder?.OnModelBound(operation.DispatchMethod, arguments); // Tune service instance for operation call var serviceOperationTuners = serviceProvider.GetServices<IServiceOperationTuner>(); foreach (var operationTuner in serviceOperationTuners) { operationTuner.Tune(httpContext, serviceInstance, operation); } } private void SetMessageHeadersToProperty(Message requestMessage, object serviceInstance) { var headerProperty = _service.ServiceType.GetProperty("MessageHeaders"); if (headerProperty != null && headerProperty.PropertyType == requestMessage.Headers.GetType()) { headerProperty.SetValue(serviceInstance, requestMessage.Headers); } } [MethodImpl(MethodImplOptions.AggressiveInlining)] private object[] GetRequestArguments(Message requestMessage, XmlDictionaryReader xmlReader, OperationDescription operation, HttpContext httpContext) { var arguments = new object[operation.AllParameters.Length]; IEnumerable<Type> serviceKnownTypes = operation .GetServiceKnownTypesHierarchy() .Select(x => x.Type); if (!operation.IsMessageContractRequest) { if (xmlReader != null) { xmlReader.ReadStartElement(operation.Name, operation.Contract.Namespace); var lastParameterIndex = -1; while (!xmlReader.EOF) { var parameterInfo = operation.InParameters.FirstOrDefault(p => p.Name == xmlReader.LocalName); if (parameterInfo == null) { xmlReader.Skip(); continue; } // prevent infinite loop (see https://github.com/DigDes/SoapCore/issues/610) if (parameterInfo.Index == lastParameterIndex) { break; } lastParameterIndex = parameterInfo.Index; var argumentValue = _serializerHelper.DeserializeInputParameter( xmlReader, parameterInfo.Parameter.ParameterType, parameterInfo.Name, operation.Contract.Namespace, parameterInfo.Parameter, serviceKnownTypes); //fix https://github.com/DigDes/SoapCore/issues/379 (hack, need research) if (argumentValue == null) { argumentValue = _serializerHelper.DeserializeInputParameter( xmlReader, parameterInfo.Parameter.ParameterType, parameterInfo.Name, parameterInfo.Namespace, parameterInfo.Parameter, serviceKnownTypes); } // sometimes there's no namespace for the parameter (ex. 
MS SOAP SDK) if (argumentValue == null) { argumentValue = _serializerHelper.DeserializeInputParameter( xmlReader, parameterInfo.Parameter.ParameterType, parameterInfo.Name, string.Empty, parameterInfo.Parameter, serviceKnownTypes); } arguments[parameterInfo.Index] = argumentValue; } var httpContextParameter = operation.InParameters.FirstOrDefault(x => x.Parameter.ParameterType == typeof(HttpContext)); if (httpContextParameter != default) { arguments[httpContextParameter.Index] = httpContext; } } else { arguments = Array.Empty<object>(); } } else { // MessageContracts are constrained to having one "InParameter". We can do special logic on // for this Debug.Assert(operation.InParameters.Length == 1, "MessageContracts are constrained to having one 'InParameter'"); var parameterInfo = operation.InParameters[0]; var parameterType = parameterInfo.Parameter.ParameterType; var messageContractAttribute = parameterType.GetCustomAttribute<MessageContractAttribute>(); Debug.Assert(messageContractAttribute != null, "operation.IsMessageContractRequest should be false if this is null"); var @namespace = parameterInfo.Namespace ?? operation.Contract.Namespace; if (messageContractAttribute.IsWrapped && !parameterType.GetMembersWithAttribute<MessageHeaderAttribute>().Any()) { //https://github.com/DigDes/SoapCore/issues/385 if (operation.DispatchMethod.GetCustomAttribute<XmlSerializerFormatAttribute>()?.Style == OperationFormatStyle.Rpc) { DeserializeParameters(requestMessage, xmlReader, parameterType, parameterInfo, @namespace, serviceKnownTypes, messageContractAttribute, arguments); } else { // It's wrapped so either the wrapper name or the name of the wrapper type arguments[parameterInfo.Index] = _serializerHelper.DeserializeInputParameter( xmlReader, parameterInfo.Parameter.ParameterType, messageContractAttribute.WrapperName ?? parameterInfo.Parameter.ParameterType.Name, messageContractAttribute.WrapperNamespace ?? 
@namespace, parameterInfo.Parameter, serviceKnownTypes); } } else { DeserializeParameters(requestMessage, xmlReader, parameterType, parameterInfo, @namespace, serviceKnownTypes, messageContractAttribute, arguments); } } foreach (var parameterInfo in operation.OutParameters) { if (arguments[parameterInfo.Index] != null) { // do not overwrite input ref parameters continue; } if (parameterInfo.Parameter.ParameterType.Name == "Guid&") { arguments[parameterInfo.Index] = Guid.Empty; } else if (parameterInfo.Parameter.ParameterType.Name == "String&" || parameterInfo.Parameter.ParameterType.GetElementType().IsArray) { arguments[parameterInfo.Index] = null; } else { var type = parameterInfo.Parameter.ParameterType.GetElementType(); arguments[parameterInfo.Index] = Activator.CreateInstance(type); } } return arguments; } // https://github.com/DigDes/SoapCore/issues/575 private void DeserializeParameters( Message requestMessage, XmlDictionaryReader xmlReader, Type parameterType, SoapMethodParameterInfo parameterInfo, string @namespace, IEnumerable<Type> serviceKnownTypes, MessageContractAttribute messageContractAttribute, object[] arguments) { var messageHeadersMembers = parameterType.GetPropertyOrFieldMembers() .Where(x => x.GetCustomAttribute<MessageHeaderAttribute>() != null) .Select(mi => new { MemberInfo = mi, MessageHeaderMemberAttribute = mi.GetCustomAttribute<MessageHeaderAttribute>() }).ToArray(); var wrapperObject = Activator.CreateInstance(parameterInfo.Parameter.ParameterType); for (var i = 0; i < requestMessage.Headers.Count; i++) { var header = requestMessage.Headers[i]; var member = messageHeadersMembers.FirstOrDefault(x => x.MessageHeaderMemberAttribute.Name == header.Name || x.MemberInfo.Name == header.Name); if (member != null) { var reader = requestMessage.Headers.GetReaderAtHeader(i); var value = _serializerHelper.DeserializeInputParameter( reader, member.MemberInfo.GetPropertyOrFieldType(), member.MessageHeaderMemberAttribute.Name ?? member.MemberInfo.Name, member.MessageHeaderMemberAttribute.Namespace ?? @namespace, member.MemberInfo, serviceKnownTypes); member.MemberInfo.SetValueToPropertyOrField(wrapperObject, value); } } var messageBodyMembers = parameterType.GetPropertyOrFieldMembers() .Where(x => x.GetCustomAttribute<MessageBodyMemberAttribute>() != null).Select(mi => new { Member = mi, MessageBodyMemberAttribute = mi.GetCustomAttribute<MessageBodyMemberAttribute>() }).OrderBy(x => x.MessageBodyMemberAttribute.Order); if (messageContractAttribute.IsWrapped) { xmlReader.Read(); } foreach (var messageBodyMember in messageBodyMembers) { var messageBodyMemberAttribute = messageBodyMember.MessageBodyMemberAttribute; var messageBodyMemberInfo = messageBodyMember.Member; var innerParameterName = messageBodyMemberAttribute.Name ?? messageBodyMemberInfo.Name; var innerParameterNs = messageBodyMemberAttribute.Namespace ?? @namespace; var innerParameterType = messageBodyMemberInfo.GetPropertyOrFieldType(); var innerParameter = _serializerHelper.DeserializeInputParameter( xmlReader, innerParameterType, innerParameterName, innerParameterNs, messageBodyMemberInfo, serviceKnownTypes); messageBodyMemberInfo.SetValueToPropertyOrField(wrapperObject, innerParameter); } arguments[parameterInfo.Index] = wrapperObject; } /// <summary> /// Helper message to write an error response message in case of an exception. /// </summary> /// <param name="exception"> /// The exception that caused the failure. 
/// </param> /// <param name="statusCode"> /// The HTTP status code that shall be returned to the caller. /// </param> /// <param name="serviceProvider"> /// The DI container. /// </param> /// <param name="requestMessage"> /// The Message for the incoming request. /// </param> /// <param name="messageEncoder"> /// The message encoder of the incoming request. /// </param> /// <param name="httpContext"> /// The HTTP context that received the response message. /// </param> /// <returns> /// Returns the constructed message (which is implicitly written to the response /// and therefore must not be handled by the caller). /// </returns> private Message CreateErrorResponseMessage( Exception exception, int statusCode, IServiceProvider serviceProvider, Message requestMessage, SoapMessageEncoder messageEncoder, HttpContext httpContext) { _logger.LogError(exception, "An error occurred processing the message"); var xmlNamespaceManager = GetXmlNamespaceManager(messageEncoder); var faultExceptionTransformer = serviceProvider.GetRequiredService<IFaultExceptionTransformer>(); var faultMessage = faultExceptionTransformer.ProvideFault(exception, messageEncoder.MessageVersion, requestMessage, xmlNamespaceManager); if (!httpContext.Response.HasStarted) { httpContext.Response.ContentType = httpContext.Request.ContentType; httpContext.Response.Headers["SOAPAction"] = faultMessage.Headers.Action; httpContext.Response.StatusCode = statusCode; } SetHttpResponse(httpContext, faultMessage); if (messageEncoder.MessageVersion.Addressing == AddressingVersion.WSAddressing10) { // TODO: Some additional work needs to be done in order to support setting the action. Simply setting it to // "http://www.w3.org/2005/08/addressing/fault" will cause the WCF Client to not be able to figure out the type faultMessage.Headers.RelatesTo = requestMessage?.Headers.MessageId; faultMessage.Headers.To = requestMessage?.Headers.ReplyTo?.Uri; } return faultMessage; } private void SetHttpResponse(HttpContext httpContext, Message message) { if (!message.Properties.TryGetValue(HttpResponseMessageProperty.Name, out var value) || !(value is HttpResponseMessageProperty httpProperty)) { return; } httpContext.Response.StatusCode = (int)httpProperty.StatusCode; var feature = httpContext.Features.Get<IHttpResponseFeature>(); if (feature != null && !string.IsNullOrEmpty(httpProperty.StatusDescription)) { feature.ReasonPhrase = httpProperty.StatusDescription; } foreach (string key in httpProperty.Headers.Keys) { httpContext.Response.Headers.Add(key, httpProperty.Headers.GetValues(key)); } } private async Task ProcessXSD(HttpContext httpContext) { var meta = new MetaFromFile(); if (!string.IsNullOrEmpty(_options.WsdlFileOptions.VirtualPath)) { meta.CurrentWebServer = _options.WsdlFileOptions.VirtualPath + "/"; } meta.CurrentWebService = httpContext.Request.Path.Value.Replace("/", string.Empty); var mapping = _options.WsdlFileOptions.WebServiceWSDLMapping[meta.CurrentWebService]; meta.XsdFolder = mapping.SchemaFolder; if (_options.WsdlFileOptions.UrlOverride != string.Empty) { meta.ServerUrl = _options.WsdlFileOptions.UrlOverride; } else { meta.ServerUrl = httpContext.Request.Scheme + "://" + httpContext.Request.Host + "/"; } string xsdfile = httpContext.Request.Query["name"]; //Check to prevent path traversal if (string.IsNullOrEmpty(xsdfile) || Path.GetFileName(xsdfile) != xsdfile) { throw new ArgumentNullException("xsd parameter contains illegal values"); } if (!xsdfile.Contains(".xsd")) { throw new Exception("xsd request must contain .xsd"); } string path = 
_options.WsdlFileOptions.AppPath; string safePath = path + Path.AltDirectorySeparatorChar + meta.XsdFolder + Path.AltDirectorySeparatorChar + xsdfile; string xsd = await meta.ReadLocalFileAsync(safePath); string modifiedxsd = meta.ModifyXSDAddRightSchemaPath(xsd); //we should use text/xml in the WSDL page for browser compatibility. httpContext.Response.ContentType = "text/xml;charset=UTF-8"; await httpContext.Response.WriteAsync(modifiedxsd); } private async Task ProcessMetaFromFile(HttpContext httpContext) { var meta = new MetaFromFile(); if (!string.IsNullOrEmpty(_options.WsdlFileOptions.VirtualPath)) { meta.CurrentWebServer = _options.WsdlFileOptions.VirtualPath + "/"; } meta.CurrentWebService = httpContext.Request.Path.Value.Replace("/", string.Empty); WebServiceWSDLMapping mapping = _options.WsdlFileOptions.WebServiceWSDLMapping[meta.CurrentWebService]; meta.XsdFolder = mapping.SchemaFolder; meta.WSDLFolder = mapping.WSDLFolder; if (_options.WsdlFileOptions.UrlOverride != string.Empty) { meta.ServerUrl = _options.WsdlFileOptions.UrlOverride; } else { meta.ServerUrl = httpContext.Request.Scheme + "://" + httpContext.Request.Host + "/"; } string wsdlfile = mapping.WsdlFile; string path = _options.WsdlFileOptions.AppPath; string wsdl = await meta.ReadLocalFileAsync(path + Path.AltDirectorySeparatorChar + meta.WSDLFolder + Path.AltDirectorySeparatorChar + wsdlfile); string modifiedWsdl = meta.ModifyWSDLAddRightSchemaPath(wsdl); //we should use text/xml in the WSDL page for browser compatibility. httpContext.Response.ContentType = "text/xml;charset=UTF-8"; await httpContext.Response.WriteAsync(modifiedWsdl); } private XmlNamespaceManager GetXmlNamespaceManager(SoapMessageEncoder messageEncoder) { var xmlNamespaceManager = new XmlNamespaceManager(new NameTable()); Namespaces.AddDefaultNamespaces(xmlNamespaceManager); xmlNamespaceManager.AddNamespace("tns", _service.GeneralContract.Namespace); if (_options.XmlNamespacePrefixOverrides != null) { foreach (var ns in _options.XmlNamespacePrefixOverrides.GetNamespacesInScope(XmlNamespaceScope.Local)) { xmlNamespaceManager.AddNamespace(ns.Key, ns.Value); } } if (messageEncoder?.XmlNamespaceOverrides != null) { foreach (var ns in messageEncoder.XmlNamespaceOverrides.GetNamespacesInScope(XmlNamespaceScope.Local)) { xmlNamespaceManager.AddNamespace(ns.Key, ns.Value); } } return xmlNamespaceManager; } } }
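// Standalone sketch of the composition pattern used by MakeProcessorPipe above: each
// processor wraps the next one, and the innermost delegate is the actual message handler.
// The types here are simplified stand-ins (string payloads instead of
// System.ServiceModel.Channels.Message), not SoapCore's own interfaces.
using System;
using System.Threading.Tasks;

internal static class ProcessorPipeSketch
{
    // Stand-in for ISoapMessageProcessor.ProcessMessage(message, context, next).
    internal delegate Task<string> Processor(string message, Func<string, Task<string>> next);

    // Builds a single delegate that runs processors[0] .. processors[n-1] around 'terminal',
    // mirroring the recursive local function in MakeProcessorPipe.
    internal static Func<string, Task<string>> Compose(Processor[] processors, Func<string, Task<string>> terminal)
    {
        Func<string, Task<string>> Make(int i)
        {
            if (i < processors.Length)
            {
                return message => processors[i](message, Make(i + 1));
            }

            return terminal;
        }

        return Make(0);
    }

    internal static async Task Demo()
    {
        Processor[] processors =
        {
            async (msg, next) => "<outer>" + await next(msg) + "</outer>",
            async (msg, next) => "<inner>" + await next(msg) + "</inner>",
        };

        var pipe = Compose(processors, msg => Task.FromResult("handled:" + msg));
        Console.WriteLine(await pipe("request")); // <outer><inner>handled:request</inner></outer>
    }
}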
/* * CombinedDictionary.cs - Implementation of the * "System.Runtime.Remoting.Channels.CombinedDictionary" class. * * Copyright (C) 2003 Southern Storm Software, Pty Ltd. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ namespace System.Runtime.Remoting.Channels { #if CONFIG_REMOTING using System.Collections; // Class that combines multiple dictionaries into one. internal class CombinedDictionary : IDictionary, ICollection, IEnumerable { // Internal state. private ArrayList members; // Constructor. public CombinedDictionary(ArrayList members) { this.members = members; } // Implement the IDictionary interface. public void Add(Object key, Object value) { throw new NotSupportedException(); } public void Clear() { throw new NotSupportedException(); } public bool Contains(Object key) { foreach(IDictionary dict in members) { if(dict.Contains(key)) { return true; } } return false; } public IDictionaryEnumerator GetEnumerator() { return new CombinedEnumerator(this, Keys); } public void Remove(Object key) { throw new NotSupportedException(); } public bool IsFixedSize { get { return true; } } public bool IsReadOnly { get { return false; } } public Object this[Object key] { get { foreach(IDictionary dict in members) { if(dict.Contains(key)) { return dict[key]; } } return null; } set { foreach(IDictionary dict in members) { if(dict.Contains(key)) { dict[key] = value; } } } } public ICollection Keys { get { ArrayList keys = new ArrayList(); foreach(IDictionary dict in members) { IDictionaryEnumerator e = dict.GetEnumerator(); while(e.MoveNext()) { keys.Add(e.Key); } } return keys; } } public ICollection Values { get { ArrayList values = new ArrayList(); foreach(IDictionary dict in members) { IDictionaryEnumerator e = dict.GetEnumerator(); while(e.MoveNext()) { values.Add(e.Value); } } return values; } } // Implement the ICollection interface. public void CopyTo(Array array, int index) { throw new NotSupportedException(); } public int Count { get { int count = 0; foreach(IDictionary dict in members) { count += dict.Count; } return count; } } public bool IsSynchronized { get { return false; } } public Object SyncRoot { get { return this; } } // Implement the IEnumerable interface. IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } // Enumerator class for "CombinedDictionary". private sealed class CombinedEnumerator : IDictionaryEnumerator { // Internal state. private CombinedDictionary dict; private ArrayList keys; private int index; // Constructor. public CombinedEnumerator(CombinedDictionary dict, ICollection keys) { this.dict = dict; this.keys = (keys as ArrayList); this.index = -1; } // Implement the IEnumerator interface. public bool MoveNext() { ++index; return (index < keys.Count); } public void Reset() { index = -1; } public Object Current { get { return new DictionaryEntry(Key, Value); } } // Implement the IDictionaryEnumerator interface. 
public DictionaryEntry Entry { get { return new DictionaryEntry(Key, Value); } } public Object Key { get { if(index < 0 || index >= keys.Count) { throw new InvalidOperationException (_("Invalid_BadEnumeratorPosition")); } return keys[index]; } } public Object Value { get { if(index < 0 || index >= keys.Count) { throw new InvalidOperationException (_("Invalid_BadEnumeratorPosition")); } return dict[keys[index]]; } } }; // class CombinedEnumerator }; // class CombinedDictionary #endif // CONFIG_REMOTING }; // namespace System.Runtime.Remoting.Channels
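/*
 * Illustrative usage sketch for CombinedDictionary above; not part of the original file.
 * Because the class is internal, a snippet like this only compiles from within the same
 * assembly, and the two Hashtable instances and their keys are invented for the example.
 * It shows the intended contract: lookups, Count and enumeration span every member
 * dictionary, while the mutating Add/Clear/CopyTo members throw NotSupportedException.
 */
namespace System.Runtime.Remoting.Channels
{
#if CONFIG_REMOTING

using System.Collections;

internal sealed class CombinedDictionarySketch
{
	public static void Demo()
			{
				// Two independent property bags, e.g. channel and sink properties.
				IDictionary channelProps = new Hashtable();
				channelProps["name"] = "tcp";
				IDictionary sinkProps = new Hashtable();
				sinkProps["port"] = 8085;

				ArrayList members = new ArrayList();
				members.Add(channelProps);
				members.Add(sinkProps);
				CombinedDictionary combined = new CombinedDictionary(members);

				// Lookups walk the member dictionaries in order.
				Console.WriteLine(combined["name"]);   // tcp
				Console.WriteLine(combined["port"]);   // 8085
				Console.WriteLine(combined.Count);     // 2

				// Enumeration yields every entry of every member dictionary.
				foreach(DictionaryEntry entry in combined)
				{
					Console.WriteLine("{0} = {1}", entry.Key, entry.Value);
				}
			}

}; // class CombinedDictionarySketch

#endif // CONFIG_REMOTING

}; // namespace System.Runtime.Remoting.Channels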
using System; using System.Drawing; using System.Drawing.Drawing2D; using System.Collections.Generic; using System.Text; namespace Banshee.Rendering { public static class DrawingMethods { //Draw Glass Hilight private static LinearGradientBrush BackBrush; private static float _cutMenuItemBack = 1.2f; private static void CreateBackBrush(Rectangle rect) { if (rect.Width > 0 && rect.Height > 0) { Color[] col = new Color[] { Color.FromArgb(83, 83, 83), Color.FromArgb(83, 83, 83), Color.FromArgb(83, 83, 0), Color.FromArgb(0, 0, 0), Color.FromArgb(255, 255, 255) }; float[] pos = new float[] { 0F, 0.47F, 0.53F, 0.75F, 1F };//{ 0.0f, 0.2f, 0.2f, 1.0f }; ColorBlend blend = new ColorBlend(); blend.Colors = col; blend.Positions = pos; BackBrush = new LinearGradientBrush(rect, Color.Transparent, Color.Transparent, LinearGradientMode.Vertical); BackBrush.InterpolationColors = blend; } } //185,209,234 public static void DrawGlassGradient(Graphics g, Rectangle rect, Color LightColor, Color DarkColor, float Angle) { Rectangle r = rect; if (r.Width == 0 || r.Height == 0) return; Blend glassBlend = new Blend(4); glassBlend.Positions[0] = 0f; glassBlend.Factors[0] = 0.0f; glassBlend.Positions[1] = 0.41f; glassBlend.Factors[1] = 0.4f; glassBlend.Positions[2] = 0.41f; glassBlend.Factors[2] = 0.6f; glassBlend.Positions[3] = 1f; glassBlend.Factors[3] = 0.2f; g.InterpolationMode = InterpolationMode.HighQualityBilinear; g.CompositingQuality = CompositingQuality.HighQuality; using (LinearGradientBrush lgb = new LinearGradientBrush(r, LightColor, DarkColor, Angle)) { lgb.Blend = glassBlend; g.FillRectangle(lgb, r); } } public static void DrawGlassBackGround(Rectangle rect, Graphics g) { g.InterpolationMode = InterpolationMode.HighQualityBilinear; g.CompositingQuality = CompositingQuality.HighQuality; if (BackBrush != null) { BackBrush = null; } CreateBackBrush(rect); g.FillRectangle(BackBrush, rect); } public static void DrawHighlight(Graphics g, Rectangle rectangle, Color SelectedColor, Color HighLight) { Color FillColor = HighLight; g.InterpolationMode = InterpolationMode.HighQualityBilinear; g.CompositingQuality = CompositingQuality.HighQuality; using (SolidBrush ForeBrush = new SolidBrush(FillColor)) { g.FillRectangle(ForeBrush, rectangle); } using (LinearGradientBrush Brush = new LinearGradientBrush(rectangle, SelectedColor, Color.Transparent, LinearGradientMode.Vertical)) { float[] pos = new float[] { 0.0f, 0.40f, 0.5f, 1.0f }; float[] factors = new float[] { 0f, 0.35f, 1f, 0.3f }; Blend ColorBlend = new Blend(4); ColorBlend.Factors = factors; ColorBlend.Positions = pos; Brush.Blend = ColorBlend; g.FillRectangle(Brush, rectangle); } } public static void DrawSoftHighlight(Graphics g, Rectangle rectangle, Color SelectedColor, Color HighLight) { using (LinearGradientBrush BackBrush = new LinearGradientBrush(rectangle, SelectedColor, Color.Transparent, LinearGradientMode.Vertical)) { g.FillRectangle(BackBrush, rectangle); } } public static void DrawGradientBorder(Graphics g, Rectangle backRect, GradientItemColors colors) { // Drawing with anti aliasing to create smoother appearance g.SmoothingMode = SmoothingMode.AntiAlias; Rectangle backRectI = backRect; backRectI.Inflate(1, 1); // Finally draw the border around the menu item using (LinearGradientBrush borderBrush = new LinearGradientBrush(backRectI, colors.Border1, colors.Border2, 90f)) { // Sigma curve, so go from color1 to color2 and back to color1 again borderBrush.SetSigmaBellShape(0.5f); // Convert the brush to a pen for DrawPath call using (Pen borderPen = 
new Pen(borderBrush)) { // Create border path around the entire item using (GraphicsPath borderPath = CreateBorderPath(backRect, _cutMenuItemBack)) g.DrawPath(borderPen, borderPath); } } } private static GraphicsPath CreateBorderPath(Rectangle rect, Rectangle exclude, float cut) { // If nothing to exclude, then use quicker method if (exclude.IsEmpty) return CreateBorderPath(rect, cut); // Drawing lines requires we draw inside the area we want rect.Width--; rect.Height--; // Create an array of points to draw lines between List<PointF> pts = new List<PointF>(); float l = rect.X; float t = rect.Y; float r = rect.Right; float b = rect.Bottom; float x0 = rect.X + cut; float x3 = rect.Right - cut; float y0 = rect.Y + cut; float y3 = rect.Bottom - cut; float cutBack = (cut == 0f ? 1 : cut); // Does the exclude intercept the top line if ((rect.Y >= exclude.Top) && (rect.Y <= exclude.Bottom)) { float x1 = exclude.X - 1 - cut; float x2 = exclude.Right + cut; if (x0 <= x1) { pts.Add(new PointF(x0, t)); pts.Add(new PointF(x1, t)); pts.Add(new PointF(x1 + cut, t - cutBack)); } else { x1 = exclude.X - 1; pts.Add(new PointF(x1, t)); pts.Add(new PointF(x1, t - cutBack)); } if (x3 > x2) { pts.Add(new PointF(x2 - cut, t - cutBack)); pts.Add(new PointF(x2, t)); pts.Add(new PointF(x3, t)); } else { x2 = exclude.Right; pts.Add(new PointF(x2, t - cutBack)); pts.Add(new PointF(x2, t)); } } else { pts.Add(new PointF(x0, t)); pts.Add(new PointF(x3, t)); } pts.Add(new PointF(r, y0)); pts.Add(new PointF(r, y3)); pts.Add(new PointF(x3, b)); pts.Add(new PointF(x0, b)); pts.Add(new PointF(l, y3)); pts.Add(new PointF(l, y0)); // Create path using a simple set of lines that cut the corner GraphicsPath path = new GraphicsPath(); // Add a line between each set of points for (int i = 1; i < pts.Count; i++) path.AddLine(pts[i - 1], pts[i]); // Add a line to join the last to the first path.AddLine(pts[pts.Count - 1], pts[0]); return path; } private static GraphicsPath CreateBorderPath(Rectangle rect, float cut) { // Drawing lines requires we draw inside the area we want rect.Width--; rect.Height--; // Create path using a simple set of lines that cut the corner GraphicsPath path = new GraphicsPath(); path.AddLine(rect.Left + cut, rect.Top, rect.Right - cut, rect.Top); path.AddLine(rect.Right - cut, rect.Top, rect.Right, rect.Top + cut); path.AddLine(rect.Right, rect.Top + cut, rect.Right, rect.Bottom - cut); path.AddLine(rect.Right, rect.Bottom - cut, rect.Right - cut, rect.Bottom); path.AddLine(rect.Right - cut, rect.Bottom, rect.Left + cut, rect.Bottom); path.AddLine(rect.Left + cut, rect.Bottom, rect.Left, rect.Bottom - cut); path.AddLine(rect.Left, rect.Bottom - cut, rect.Left, rect.Top + cut); path.AddLine(rect.Left, rect.Top + cut, rect.Left + cut, rect.Top); return path; } } public class GradientItemColors { public Color InsideTop1; public Color InsideTop2; public Color InsideBottom1; public Color InsideBottom2; public Color FillTop1; public Color FillTop2; public Color FillBottom1; public Color FillBottom2; public Color Border1; public Color Border2; public GradientItemColors(Color insideTop1, Color insideTop2, Color insideBottom1, Color insideBottom2, Color fillTop1, Color fillTop2, Color fillBottom1, Color fillBottom2, Color border1, Color border2) { InsideTop1 = insideTop1; InsideTop2 = insideTop2; InsideBottom1 = insideBottom1; InsideBottom2 = insideBottom2; FillTop1 = fillTop1; FillTop2 = fillTop2; FillBottom1 = fillBottom1; FillBottom2 = fillBottom2; Border1 = border1; Border2 = border2; } } }
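// Illustrative usage sketch for DrawingMethods above, assuming a Windows Forms host
// (System.Windows.Forms is an assumption; the original file only references System.Drawing).
// The colours and rectangles are made up; the point is that the helpers are meant to be
// called from a paint handler with the control's Graphics and client rectangle.
using System.Drawing;
using System.Windows.Forms;
using Banshee.Rendering;

internal class GlassPanelSketch : Control
{
    protected override void OnPaint(PaintEventArgs e)
    {
        base.OnPaint(e);

        // Vertical "glass" fill over the whole client area.
        DrawingMethods.DrawGlassGradient(e.Graphics, ClientRectangle,
            Color.FromArgb(120, 160, 200), Color.FromArgb(40, 60, 90), 90f);

        // Hover-style highlight over an arbitrary inner strip.
        Rectangle strip = new Rectangle(8, 8, ClientRectangle.Width - 16, 24);
        DrawingMethods.DrawHighlight(e.Graphics, strip, Color.White, Color.FromArgb(64, Color.SteelBlue));
    }
}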
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Diagnostics.Contracts; using System.Text; namespace System.Net.Http.Headers { // This type is used to store a collection of headers in 'headerStore': // - A header can have multiple values. // - A header can have an associated parser which is able to parse the raw string value into a strongly typed object. // - If a header has an associated parser and the provided raw value can't be parsed, the value is considered // invalid. Invalid values are stored if added using TryAddWithoutValidation(). If the value was added using Add(), // Add() will throw FormatException. // - Since parsing header values is expensive and users usually only care about a few headers, header values are // lazily initialized. // // Given the properties above, a header value can have three states: // - 'raw': The header value was added using TryAddWithoutValidation() and it wasn't parsed yet. // - 'parsed': The header value was successfully parsed. It was either added using Add() where the value was parsed // immediately, or if added using TryAddWithoutValidation() a user already accessed a property/method triggering the // value to be parsed. // - 'invalid': The header value was parsed, but parsing failed because the value is invalid. Storing invalid values // allows users to still retrieve the value (by calling GetValues()), but it will not be exposed as strongly typed // object. E.g. the client receives a response with the following header: 'Via: 1.1 proxy, invalid' // - HttpHeaders.GetValues() will return "1.1 proxy", "invalid" // - HttpResponseHeaders.Via collection will only contain one ViaHeaderValue object with value "1.1 proxy" [SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix", Justification = "This is not a collection")] public abstract class HttpHeaders : IEnumerable<KeyValuePair<string, IEnumerable<string>>> { private Dictionary<string, HeaderStoreItemInfo> _headerStore; private Dictionary<string, HttpHeaderParser> _parserStore; private HashSet<string> _invalidHeaders; private enum StoreLocation { Raw, Invalid, Parsed } protected HttpHeaders() { } public void Add(string name, string value) { CheckHeaderName(name); // We don't use GetOrCreateHeaderInfo() here, since this would create a new header in the store. If parsing // the value then throws, we would have to remove the header from the store again. So just get a // HeaderStoreItemInfo object and try to parse the value. If it works, we'll add the header. HeaderStoreItemInfo info; bool addToStore; PrepareHeaderInfoForAdd(name, out info, out addToStore); ParseAndAddValue(name, info, value); // If we get here, then the value could be parsed correctly. If we created a new HeaderStoreItemInfo, add // it to the store if we added at least one value. 
if (addToStore && (info.ParsedValue != null)) { AddHeaderToStore(name, info); } } public void Add(string name, IEnumerable<string> values) { if (values == null) { throw new ArgumentNullException("values"); } CheckHeaderName(name); HeaderStoreItemInfo info; bool addToStore; PrepareHeaderInfoForAdd(name, out info, out addToStore); try { // Note that if the first couple of values are valid followed by an invalid value, the valid values // will be added to the store before the exception for the invalid value is thrown. foreach (string value in values) { ParseAndAddValue(name, info, value); } } finally { // Even if one of the values was invalid, make sure we add the header for the valid ones. We need to be // consistent here: If values get added to an _existing_ header, then all values until the invalid one // get added. Same here: If multiple values get added to a _new_ header, make sure the header gets added // with the valid values. // However, if all values for a _new_ header were invalid, then don't add the header. if (addToStore && (info.ParsedValue != null)) { AddHeaderToStore(name, info); } } } public bool TryAddWithoutValidation(string name, string value) { if (!TryCheckHeaderName(name)) { return false; } if (value == null) { // We allow empty header values. (e.g. "My-Header: "). If the user adds multiple null/empty // values, we'll just add them to the collection. This will result in delimiter-only values: // E.g. adding two null-strings (or empty, or whitespace-only) results in "My-Header: ,". value = string.Empty; } HeaderStoreItemInfo info = GetOrCreateHeaderInfo(name, false); AddValue(info, value, StoreLocation.Raw); return true; } public bool TryAddWithoutValidation(string name, IEnumerable<string> values) { if (values == null) { throw new ArgumentNullException("values"); } if (!TryCheckHeaderName(name)) { return false; } HeaderStoreItemInfo info = GetOrCreateHeaderInfo(name, false); foreach (string value in values) { // We allow empty header values. (e.g. "My-Header: "). If the user adds multiple null/empty // values, we'll just add them to the collection. This will result in delimiter-only values: // E.g. adding two null-strings (or empty, or whitespace-only) results in "My-Header: ,". AddValue(info, value ?? string.Empty, StoreLocation.Raw); } return true; } public void Clear() { if (_headerStore != null) { _headerStore.Clear(); } } public bool Remove(string name) { CheckHeaderName(name); if (_headerStore == null) { return false; } return _headerStore.Remove(name); } public IEnumerable<string> GetValues(string name) { CheckHeaderName(name); IEnumerable<string> values; if (!TryGetValues(name, out values)) { throw new InvalidOperationException(SR.net_http_headers_not_found); } return values; } public bool TryGetValues(string name, out IEnumerable<string> values) { if (!TryCheckHeaderName(name)) { values = null; return false; } if (_headerStore == null) { values = null; return false; } HeaderStoreItemInfo info = null; if (TryGetAndParseHeaderInfo(name, out info)) { values = GetValuesAsStrings(info); return true; } values = null; return false; } public bool Contains(string name) { CheckHeaderName(name); if (_headerStore == null) { return false; } // We can't just call headerStore.ContainsKey() since after parsing the value the header may not exist // anymore (if the value contains invalid newline chars, we remove the header). So try to parse the // header value. 
HeaderStoreItemInfo info = null; return TryGetAndParseHeaderInfo(name, out info); } public override string ToString() { // Return all headers as string similar to: // HeaderName1: Value1, Value2 // HeaderName2: Value1 // ... StringBuilder sb = new StringBuilder(); foreach (var header in this) { sb.Append(header.Key); sb.Append(": "); sb.Append(this.GetHeaderString(header.Key)); sb.Append("\r\n"); } return sb.ToString(); } internal IEnumerable<KeyValuePair<string, string>> GetHeaderStrings() { if (_headerStore == null) { yield break; } foreach (var header in _headerStore) { HeaderStoreItemInfo info = header.Value; string stringValue = GetHeaderString(info); yield return new KeyValuePair<string, string>(header.Key, stringValue); } } internal string GetHeaderString(string headerName) { return GetHeaderString(headerName, null); } internal string GetHeaderString(string headerName, object exclude) { HeaderStoreItemInfo info; if (!TryGetHeaderInfo(headerName, out info)) { return string.Empty; } return GetHeaderString(info, exclude); } private string GetHeaderString(HeaderStoreItemInfo info) { return GetHeaderString(info, null); } private string GetHeaderString(HeaderStoreItemInfo info, object exclude) { string stringValue = string.Empty; // returned if values.Length == 0 string[] values = GetValuesAsStrings(info, exclude); if (values.Length == 1) { stringValue = values[0]; } else { // Note that if we get multiple values for a header that doesn't support multiple values, we'll // just separate the values using a comma (default separator). string separator = HttpHeaderParser.DefaultSeparator; if ((info.Parser != null) && (info.Parser.SupportsMultipleValues)) { separator = info.Parser.Separator; } stringValue = string.Join(separator, values); } return stringValue; } #region IEnumerable<KeyValuePair<string, IEnumerable<string>>> Members public IEnumerator<KeyValuePair<string, IEnumerable<string>>> GetEnumerator() { if (_headerStore == null) { yield break; } List<string> invalidHeaders = null; foreach (var header in _headerStore) { HeaderStoreItemInfo info = header.Value; // Make sure we parse all raw values before returning the result. Note that this has to be // done before we calculate the array length (next line): A raw value may contain a list of // values. if (!ParseRawHeaderValues(header.Key, info, false)) { // We have an invalid header value (contains invalid newline chars). Mark it as "to-be-deleted" // and skip this header. if (invalidHeaders == null) { invalidHeaders = new List<string>(); } invalidHeaders.Add(header.Key); } else { string[] values = GetValuesAsStrings(info); yield return new KeyValuePair<string, IEnumerable<string>>(header.Key, values); } } // While we were enumerating headers, we also parsed header values. If during parsing it turned out that // the header value was invalid (contains invalid newline chars), remove the header from the store after // completing the enumeration. 
if (invalidHeaders != null) { Debug.Assert(_headerStore != null); foreach (string invalidHeader in invalidHeaders) { _headerStore.Remove(invalidHeader); } } } #endregion #region IEnumerable Members Collections.IEnumerator Collections.IEnumerable.GetEnumerator() { return GetEnumerator(); } #endregion internal void SetConfiguration(Dictionary<string, HttpHeaderParser> parserStore, HashSet<string> invalidHeaders) { Debug.Assert(_parserStore == null, "Parser store was already set."); _parserStore = parserStore; _invalidHeaders = invalidHeaders; } internal void AddParsedValue(string name, object value) { Contract.Requires((name != null) && (name.Length > 0)); Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length); Contract.Requires(value != null); HeaderStoreItemInfo info = GetOrCreateHeaderInfo(name, true); Debug.Assert(info.Parser != null, "Can't add parsed value if there is no parser available."); // If the current header has only one value, we can't add another value. The strongly typed property // must not call AddParsedValue(), but SetParsedValue(). E.g. for headers like 'Date', 'Host'. Debug.Assert(info.CanAddValue, "Header '" + name + "' doesn't support multiple values"); AddValue(info, value, StoreLocation.Parsed); } internal void SetParsedValue(string name, object value) { Contract.Requires((name != null) && (name.Length > 0)); Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length); Contract.Requires(value != null); // This method will first clear all values. This is used e.g. when setting the 'Date' or 'Host' header. // I.e. headers not supporting collections. HeaderStoreItemInfo info = GetOrCreateHeaderInfo(name, true); Debug.Assert(info.Parser != null, "Can't add parsed value if there is no parser available."); info.InvalidValue = null; info.ParsedValue = null; info.RawValue = null; AddValue(info, value, StoreLocation.Parsed); } internal void SetOrRemoveParsedValue(string name, object value) { if (value == null) { Remove(name); } else { SetParsedValue(name, value); } } internal bool RemoveParsedValue(string name, object value) { Contract.Requires((name != null) && (name.Length > 0)); Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length); Contract.Requires(value != null); if (_headerStore == null) { return false; } // If we have a value for this header, then verify if we have a single value. If so, compare that // value with 'item'. If we have a list of values, then remove 'item' from the list. HeaderStoreItemInfo info = null; if (TryGetAndParseHeaderInfo(name, out info)) { Debug.Assert(info.Parser != null, "Can't add parsed value if there is no parser available."); Debug.Assert(info.Parser.SupportsMultipleValues, "This method should not be used for single-value headers. Use Remove(string) instead."); bool result = false; // If there is no entry, just return. 
if (info.ParsedValue == null) { return false; } IEqualityComparer comparer = info.Parser.Comparer; List<object> parsedValues = info.ParsedValue as List<object>; if (parsedValues == null) { Debug.Assert(info.ParsedValue.GetType() == value.GetType(), "Stored value does not have the same type as 'value'."); if (AreEqual(value, info.ParsedValue, comparer)) { info.ParsedValue = null; result = true; } } else { foreach (object item in parsedValues) { Debug.Assert(item.GetType() == value.GetType(), "One of the stored values does not have the same type as 'value'."); if (AreEqual(value, item, comparer)) { // Remove 'item' rather than 'value', since the 'comparer' may consider two values // equal even though the default obj.Equals() may not (e.g. if 'comparer' does // case-insentive comparison for strings, but string.Equals() is case-sensitive). result = parsedValues.Remove(item); break; } } // If we removed the last item in a list, remove the list. if (parsedValues.Count == 0) { info.ParsedValue = null; } } // If there is no value for the header left, remove the header. if (info.IsEmpty) { bool headerRemoved = Remove(name); Debug.Assert(headerRemoved, "Existing header '" + name + "' couldn't be removed."); } return result; } return false; } internal bool ContainsParsedValue(string name, object value) { Contract.Requires((name != null) && (name.Length > 0)); Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length); Contract.Requires(value != null); if (_headerStore == null) { return false; } // If we have a value for this header, then verify if we have a single value. If so, compare that // value with 'item'. If we have a list of values, then compare each item in the list with 'item'. HeaderStoreItemInfo info = null; if (TryGetAndParseHeaderInfo(name, out info)) { Debug.Assert(info.Parser != null, "Can't add parsed value if there is no parser available."); Debug.Assert(info.Parser.SupportsMultipleValues, "This method should not be used for single-value headers. Use equality comparer instead."); // If there is no entry, just return. if (info.ParsedValue == null) { return false; } List<object> parsedValues = info.ParsedValue as List<object>; IEqualityComparer comparer = info.Parser.Comparer; if (parsedValues == null) { Debug.Assert(info.ParsedValue.GetType() == value.GetType(), "Stored value does not have the same type as 'value'."); return AreEqual(value, info.ParsedValue, comparer); } else { foreach (object item in parsedValues) { Debug.Assert(item.GetType() == value.GetType(), "One of the stored values does not have the same type as 'value'."); if (AreEqual(value, item, comparer)) { return true; } } return false; } } return false; } internal virtual void AddHeaders(HttpHeaders sourceHeaders) { Contract.Requires(sourceHeaders != null); Debug.Assert(_parserStore == sourceHeaders._parserStore, "Can only copy headers from an instance with the same header parsers."); if (sourceHeaders._headerStore == null) { return; } List<string> invalidHeaders = null; foreach (var header in sourceHeaders._headerStore) { // Only add header values if they're not already set on the message. Note that we don't merge // collections: If both the default headers and the message have set some values for a certain // header, then we don't try to merge the values. if ((_headerStore == null) || (!_headerStore.ContainsKey(header.Key))) { HeaderStoreItemInfo sourceInfo = header.Value; // If DefaultRequestHeaders values are copied to multiple messages, it is useful to parse these // default header values only once. 
This is what we're doing here: By parsing raw headers in // 'sourceHeaders' before copying values to our header store. if (!sourceHeaders.ParseRawHeaderValues(header.Key, sourceInfo, false)) { // If after trying to parse source header values no value is left (i.e. all values contain // invalid newline chars), mark this header as 'to-be-deleted' and skip to the next header. if (invalidHeaders == null) { invalidHeaders = new List<string>(); } invalidHeaders.Add(header.Key); } else { AddHeaderInfo(header.Key, sourceInfo); } } } if (invalidHeaders != null) { Debug.Assert(sourceHeaders._headerStore != null); foreach (string invalidHeader in invalidHeaders) { sourceHeaders._headerStore.Remove(invalidHeader); } } } private void AddHeaderInfo(string headerName, HeaderStoreItemInfo sourceInfo) { HeaderStoreItemInfo destinationInfo = CreateAndAddHeaderToStore(headerName); Debug.Assert(sourceInfo.Parser == destinationInfo.Parser, "Expected same parser on both source and destination header store for header '" + headerName + "'."); // We have custom header values. The parsed values are strings. if (destinationInfo.Parser == null) { Debug.Assert((sourceInfo.RawValue == null) && (sourceInfo.InvalidValue == null), "No raw or invalid values expected for custom headers."); // Custom header values are always stored as string or list of strings. destinationInfo.ParsedValue = CloneStringHeaderInfoValues(sourceInfo.ParsedValue); } else { // We have a parser, so we have to copy invalid values and clone parsed values. // Invalid values are always strings. Strings are immutable. So we only have to clone the // collection (if there is one). destinationInfo.InvalidValue = CloneStringHeaderInfoValues(sourceInfo.InvalidValue); // Now clone and add parsed values (if any). if (sourceInfo.ParsedValue != null) { List<object> sourceValues = sourceInfo.ParsedValue as List<object>; if (sourceValues == null) { CloneAndAddValue(destinationInfo, sourceInfo.ParsedValue); } else { foreach (object item in sourceValues) { CloneAndAddValue(destinationInfo, item); } } } } } private static void CloneAndAddValue(HeaderStoreItemInfo destinationInfo, object source) { // We only have one value. Clone it and assign it to the store. ICloneable cloneableValue = source as ICloneable; if (cloneableValue != null) { AddValue(destinationInfo, cloneableValue.Clone(), StoreLocation.Parsed); } else { // If it doesn't implement ICloneable, it's a value type or an immutable type like String/Uri. AddValue(destinationInfo, source, StoreLocation.Parsed); } } private static object CloneStringHeaderInfoValues(object source) { if (source == null) { return null; } List<object> sourceValues = source as List<object>; if (sourceValues == null) { // If we just have one value, return the reference to the string (strings are immutable so it's OK // to use the reference). return source; } else { // If we have a list of strings, create a new list and copy all strings to the new list. 
return new List<object>(sourceValues); } } private HeaderStoreItemInfo GetOrCreateHeaderInfo(string name, bool parseRawValues) { Contract.Requires((name != null) && (name.Length > 0)); Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length); Contract.Ensures(Contract.Result<HeaderStoreItemInfo>() != null); HeaderStoreItemInfo result = null; bool found = false; if (parseRawValues) { found = TryGetAndParseHeaderInfo(name, out result); } else { found = TryGetHeaderInfo(name, out result); } if (!found) { result = CreateAndAddHeaderToStore(name); } return result; } private HeaderStoreItemInfo CreateAndAddHeaderToStore(string name) { // If we don't have the header in the store yet, add it now. HeaderStoreItemInfo result = new HeaderStoreItemInfo(GetParser(name)); AddHeaderToStore(name, result); return result; } private void AddHeaderToStore(string name, HeaderStoreItemInfo info) { if (_headerStore == null) { _headerStore = new Dictionary<string, HeaderStoreItemInfo>( StringComparer.OrdinalIgnoreCase); } _headerStore.Add(name, info); } private bool TryGetHeaderInfo(string name, out HeaderStoreItemInfo info) { if (_headerStore == null) { info = null; return false; } return _headerStore.TryGetValue(name, out info); } private bool TryGetAndParseHeaderInfo(string name, out HeaderStoreItemInfo info) { if (TryGetHeaderInfo(name, out info)) { return ParseRawHeaderValues(name, info, true); } return false; } private bool ParseRawHeaderValues(string name, HeaderStoreItemInfo info, bool removeEmptyHeader) { // Prevent multiple threads from parsing the raw value at the same time, or else we would get // false duplicates or false nulls. lock (info) { // Unlike TryGetHeaderInfo() this method tries to parse all non-validated header values (if any) // before returning to the caller. if (info.RawValue != null) { List<string> rawValues = info.RawValue as List<string>; if (rawValues == null) { ParseSingleRawHeaderValue(name, info); } else { ParseMultipleRawHeaderValues(name, info, rawValues); } // At this point all values are either in info.ParsedValue, info.InvalidValue, or were removed since they // contain invalid newline chars. Reset RawValue. info.RawValue = null; // During parsing, we removed the value since it contained invalid newline chars. Return false to indicate that // this is an empty header. If the caller specified to remove empty headers, we'll remove the header before // returning. if ((info.InvalidValue == null) && (info.ParsedValue == null)) { if (removeEmptyHeader) { // After parsing the raw value, no value is left because all values contain invalid newline // chars. 
Debug.Assert(_headerStore != null); _headerStore.Remove(name); } return false; } } } return true; } private static void ParseMultipleRawHeaderValues(string name, HeaderStoreItemInfo info, List<string> rawValues) { if (info.Parser == null) { foreach (string rawValue in rawValues) { if (!ContainsInvalidNewLine(rawValue, name)) { AddValue(info, rawValue, StoreLocation.Parsed); } } } else { foreach (string rawValue in rawValues) { if (!TryParseAndAddRawHeaderValue(name, info, rawValue, true)) { if (HttpEventSource.Log.IsEnabled()) HttpEventSource.Log.HeadersInvalidValue(name, rawValue); } } } } private static void ParseSingleRawHeaderValue(string name, HeaderStoreItemInfo info) { string rawValue = info.RawValue as string; Debug.Assert(rawValue != null, "RawValue must either be List<string> or string."); if (info.Parser == null) { if (!ContainsInvalidNewLine(rawValue, name)) { AddValue(info, rawValue, StoreLocation.Parsed); } } else { if (!TryParseAndAddRawHeaderValue(name, info, rawValue, true)) { if (HttpEventSource.Log.IsEnabled()) HttpEventSource.Log.HeadersInvalidValue(name, rawValue); } } } // See Add(name, string) internal bool TryParseAndAddValue(string name, string value) { // We don't use GetOrCreateHeaderInfo() here, since this would create a new header in the store. If parsing // the value then throws, we would have to remove the header from the store again. So just get a // HeaderStoreItemInfo object and try to parse the value. If it works, we'll add the header. HeaderStoreItemInfo info; bool addToStore; PrepareHeaderInfoForAdd(name, out info, out addToStore); bool result = TryParseAndAddRawHeaderValue(name, info, value, false); if (result && addToStore && (info.ParsedValue != null)) { // If we get here, then the value could be parsed correctly. If we created a new HeaderStoreItemInfo, add // it to the store if we added at least one value. AddHeaderToStore(name, info); } return result; } // See ParseAndAddValue private static bool TryParseAndAddRawHeaderValue(string name, HeaderStoreItemInfo info, string value, bool addWhenInvalid) { Contract.Requires(info != null); Contract.Requires(info.Parser != null); // Values are added as 'invalid' if we either can't parse the value OR if we already have a value // and the current header doesn't support multiple values: e.g. trying to add a date/time value // to the 'Date' header if we already have a date/time value will result in the second value being // added to the 'invalid' header values. if (!info.CanAddValue) { if (addWhenInvalid) { AddValue(info, value ?? string.Empty, StoreLocation.Invalid); } return false; } int index = 0; object parsedValue = null; if (info.Parser.TryParseValue(value, info.ParsedValue, ref index, out parsedValue)) { // The raw string only represented one value (which was successfully parsed). Add the value and return. if ((value == null) || (index == value.Length)) { if (parsedValue != null) { AddValue(info, parsedValue, StoreLocation.Parsed); } return true; } Debug.Assert(index < value.Length, "Parser must return an index value within the string length."); // If we successfully parsed a value, but there are more left to read, store the results in a temp // list. Only when all values are parsed successfully write the list to the store. 
List<object> parsedValues = new List<object>(); if (parsedValue != null) { parsedValues.Add(parsedValue); } while (index < value.Length) { if (info.Parser.TryParseValue(value, info.ParsedValue, ref index, out parsedValue)) { if (parsedValue != null) { parsedValues.Add(parsedValue); } } else { if (!ContainsInvalidNewLine(value, name) && addWhenInvalid) { AddValue(info, value, StoreLocation.Invalid); } return false; } } // All values were parsed correctly. Copy results to the store. foreach (object item in parsedValues) { AddValue(info, item, StoreLocation.Parsed); } return true; } if (!ContainsInvalidNewLine(value, name) && addWhenInvalid) { AddValue(info, value ?? string.Empty, StoreLocation.Invalid); } return false; } private static void AddValue(HeaderStoreItemInfo info, object value, StoreLocation location) { // Since we have the same pattern for all three store locations (raw, invalid, parsed), we use // this helper method to deal with adding values: // - if 'null' just set the store property to 'value' // - if 'List<T>' append 'value' to the end of the list // - if 'T', i.e. we have already a value stored (but no list), create a list, add the stored value // to the list and append 'value' at the end of the newly created list. Debug.Assert((info.Parser != null) || ((info.Parser == null) && (value.GetType() == typeof(string))), "If no parser is defined, then the value must be string."); object currentStoreValue = null; switch (location) { case StoreLocation.Raw: currentStoreValue = info.RawValue; AddValueToStoreValue<string>(info, value, ref currentStoreValue); info.RawValue = currentStoreValue; break; case StoreLocation.Invalid: currentStoreValue = info.InvalidValue; AddValueToStoreValue<string>(info, value, ref currentStoreValue); info.InvalidValue = currentStoreValue; break; case StoreLocation.Parsed: Debug.Assert((value == null) || (!(value is List<object>)), "Header value types must not derive from List<object> since this type is used internally to store " + "lists of values. So we would not be able to distinguish between a single value and a list of values."); currentStoreValue = info.ParsedValue; AddValueToStoreValue<object>(info, value, ref currentStoreValue); info.ParsedValue = currentStoreValue; break; default: Debug.Assert(false, "Unknown StoreLocation value: " + location.ToString()); break; } } private static void AddValueToStoreValue<T>(HeaderStoreItemInfo info, object value, ref object currentStoreValue) where T : class { // If there is no value set yet, then add current item as value (we don't create a list // if not required). If 'info.Value' is already assigned then make sure 'info.Value' is a // List<T> and append 'item' to the list. if (currentStoreValue == null) { currentStoreValue = value; } else { List<T> storeValues = currentStoreValue as List<T>; if (storeValues == null) { storeValues = new List<T>(2); Debug.Assert(value is T); storeValues.Add(currentStoreValue as T); currentStoreValue = storeValues; } Debug.Assert(value is T); storeValues.Add(value as T); } } // Since most of the time we just have 1 value, we don't create a List<object> for one value, but we change // the return type to 'object'. The caller has to deal with the return type (object vs. List<object>). This // is to optimize the most common scenario where a header has only one value. 
internal object GetParsedValues(string name) { Contract.Requires((name != null) && (name.Length > 0)); Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length); HeaderStoreItemInfo info = null; if (!TryGetAndParseHeaderInfo(name, out info)) { return null; } return info.ParsedValue; } private void PrepareHeaderInfoForAdd(string name, out HeaderStoreItemInfo info, out bool addToStore) { info = null; addToStore = false; if (!TryGetAndParseHeaderInfo(name, out info)) { info = new HeaderStoreItemInfo(GetParser(name)); addToStore = true; } } private void ParseAndAddValue(string name, HeaderStoreItemInfo info, string value) { Contract.Requires(info != null); if (info.Parser == null) { // If we don't have a parser for the header, we consider the value valid if it doesn't contains // invalid newline characters. We add the values as "parsed value". Note that we allow empty values. CheckInvalidNewLine(value); AddValue(info, value ?? string.Empty, StoreLocation.Parsed); return; } // If the header only supports 1 value, we can add the current value only if there is no // value already set. if (!info.CanAddValue) { throw new FormatException(string.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_headers_single_value_header, name)); } int index = 0; object parsedValue = info.Parser.ParseValue(value, info.ParsedValue, ref index); // The raw string only represented one value (which was successfully parsed). Add the value and return. // If value is null we still have to first call ParseValue() to allow the parser to decide whether null is // a valid value. If it is (i.e. no exception thrown), we set the parsed value (if any) and return. if ((value == null) || (index == value.Length)) { // If the returned value is null, then it means the header accepts empty values. I.e. we don't throw // but we don't add 'null' to the store either. if (parsedValue != null) { AddValue(info, parsedValue, StoreLocation.Parsed); } return; } Debug.Assert(index < value.Length, "Parser must return an index value within the string length."); // If we successfully parsed a value, but there are more left to read, store the results in a temp // list. Only when all values are parsed successfully write the list to the store. List<object> parsedValues = new List<object>(); if (parsedValue != null) { parsedValues.Add(parsedValue); } while (index < value.Length) { parsedValue = info.Parser.ParseValue(value, info.ParsedValue, ref index); if (parsedValue != null) { parsedValues.Add(parsedValue); } } // All values were parsed correctly. Copy results to the store. 
foreach (object item in parsedValues) { AddValue(info, item, StoreLocation.Parsed); } } private HttpHeaderParser GetParser(string name) { if (_parserStore == null) { return null; } HttpHeaderParser parser = null; if (_parserStore.TryGetValue(name, out parser)) { return parser; } return null; } private void CheckHeaderName(string name) { if (string.IsNullOrEmpty(name)) { throw new ArgumentException(SR.net_http_argument_empty_string, "name"); } if (HttpRuleParser.GetTokenLength(name, 0) != name.Length) { throw new FormatException(SR.net_http_headers_invalid_header_name); } if ((_invalidHeaders != null) && (_invalidHeaders.Contains(name))) { throw new InvalidOperationException(SR.net_http_headers_not_allowed_header_name); } } private bool TryCheckHeaderName(string name) { if (string.IsNullOrEmpty(name)) { return false; } if (HttpRuleParser.GetTokenLength(name, 0) != name.Length) { return false; } if ((_invalidHeaders != null) && (_invalidHeaders.Contains(name))) { return false; } return true; } private static void CheckInvalidNewLine(string value) { if (value == null) { return; } if (HttpRuleParser.ContainsInvalidNewLine(value)) { throw new FormatException(SR.net_http_headers_no_newlines); } } private static bool ContainsInvalidNewLine(string value, string name) { if (HttpRuleParser.ContainsInvalidNewLine(value)) { if (NetEventSource.Log.IsEnabled()) NetEventSource.PrintError(NetEventSource.ComponentType.Http, string.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_log_headers_no_newlines, name, value)); return true; } return false; } private static string[] GetValuesAsStrings(HeaderStoreItemInfo info) { return GetValuesAsStrings(info, null); } // When doing exclusion comparison, assume raw values have been parsed. private static string[] GetValuesAsStrings(HeaderStoreItemInfo info, object exclude) { Contract.Ensures(Contract.Result<string[]>() != null); int length = GetValueCount(info); string[] values = new string[length]; if (length > 0) { int currentIndex = 0; ReadStoreValues<string>(values, info.RawValue, null, null, ref currentIndex); ReadStoreValues<object>(values, info.ParsedValue, info.Parser, exclude, ref currentIndex); // Set parser parameter to 'null' for invalid values: The invalid values is always a string so we // don't need the parser to "serialize" the value to a string. 
ReadStoreValues<string>(values, info.InvalidValue, null, null, ref currentIndex); // The values array may not be full because some values were excluded if (currentIndex < length) { string[] trimmedValues = new string[currentIndex]; Array.Copy(values, 0, trimmedValues, 0, currentIndex); values = trimmedValues; } } return values; } private static int GetValueCount(HeaderStoreItemInfo info) { Contract.Requires(info != null); int valueCount = 0; UpdateValueCount<string>(info.RawValue, ref valueCount); UpdateValueCount<string>(info.InvalidValue, ref valueCount); UpdateValueCount<object>(info.ParsedValue, ref valueCount); return valueCount; } private static void UpdateValueCount<T>(object valueStore, ref int valueCount) { if (valueStore == null) { return; } List<T> values = valueStore as List<T>; if (values != null) { valueCount += values.Count; } else { valueCount++; } } private static void ReadStoreValues<T>(string[] values, object storeValue, HttpHeaderParser parser, T exclude, ref int currentIndex) { Contract.Requires(values != null); if (storeValue != null) { List<T> storeValues = storeValue as List<T>; if (storeValues == null) { if (ShouldAdd<T>(storeValue, parser, exclude)) { values[currentIndex] = parser == null ? storeValue.ToString() : parser.ToString(storeValue); currentIndex++; } } else { foreach (object item in storeValues) { if (ShouldAdd<T>(item, parser, exclude)) { values[currentIndex] = parser == null ? item.ToString() : parser.ToString(item); currentIndex++; } } } } } private static bool ShouldAdd<T>(object storeValue, HttpHeaderParser parser, T exclude) { bool add = true; if (parser != null && exclude != null) { if (parser.Comparer != null) { add = !parser.Comparer.Equals(exclude, storeValue); } else { add = !exclude.Equals(storeValue); } } return add; } private bool AreEqual(object value, object storeValue, IEqualityComparer comparer) { Contract.Requires(value != null); if (comparer != null) { return comparer.Equals(value, storeValue); } // We don't have a comparer, so use the Equals() method. return value.Equals(storeValue); } #region Private Classes private class HeaderStoreItemInfo { private object _rawValue; private object _invalidValue; private object _parsedValue; private HttpHeaderParser _parser; internal object RawValue { get { return _rawValue; } set { _rawValue = value; } } internal object InvalidValue { get { return _invalidValue; } set { _invalidValue = value; } } internal object ParsedValue { get { return _parsedValue; } set { _parsedValue = value; } } internal HttpHeaderParser Parser { get { return _parser; } } internal bool CanAddValue { get { Debug.Assert(_parser != null, "There should be no reason to call CanAddValue if there is no parser for the current header."); // If the header only supports one value, and we already have a value set, then we can't add // another value. E.g. the 'Date' header only supports one value. We can't add multiple timestamps // to 'Date'. // So if this is a known header, ask the parser if it supports multiple values and check whether // we already have a (valid or invalid) value. // Note that we ignore the rawValue on purpose: E.g. we are parsing 2 raw values for a header only // supporting 1 value. When the first value gets parsed, CanAddValue returns true and we add the // parsed value to ParsedValue. When the second value is parsed, CanAddValue returns false, because // we already have a parsed value. 
return ((_parser.SupportsMultipleValues) || ((_invalidValue == null) && (_parsedValue == null))); } } internal bool IsEmpty { get { return ((_rawValue == null) && (_invalidValue == null) && (_parsedValue == null)); } } internal HeaderStoreItemInfo(HttpHeaderParser parser) { // Can be null. _parser = parser; } } #endregion } }
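// Illustrative sketch (not part of the header store code above): each store location (raw,
// invalid, parsed) holds a single value as the object itself and is only promoted to a List<T>
// when a second value arrives, which is the optimization AddValueToStoreValue<T> implements.
// The minimal, hypothetical helper below shows the same pattern in isolation; all names are
// invented for illustration.
using System;
using System.Collections.Generic;

internal static class SingleOrListStoreSketch
{
    // Adds 'value' to 'store', which is either null, a single T, or a List<T>.
    internal static void AddValue<T>(ref object store, T value) where T : class
    {
        if (store == null)
        {
            // Most headers have exactly one value, so avoid allocating a list for it.
            store = value;
            return;
        }

        List<T> list = store as List<T>;
        if (list == null)
        {
            // A second value arrived: promote the single stored value to a list of two.
            list = new List<T>(2) { (T)store };
            store = list;
        }
        list.Add(value);
    }

    internal static void Demo()
    {
        object store = null;
        AddValue(ref store, "gzip");     // stored as a plain string
        AddValue(ref store, "deflate");  // store becomes List<string> { "gzip", "deflate" }
        Console.WriteLine(string.Join(", ", (List<string>)store));
    }
}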
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.Azure.AcceptanceTestsCustomBaseUri { using Azure; using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// PathsOperations operations. /// </summary> internal partial class PathsOperations : IServiceOperations<AutoRestParameterizedHostTestClient>, IPathsOperations { /// <summary> /// Initializes a new instance of the PathsOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> internal PathsOperations(AutoRestParameterizedHostTestClient client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the AutoRestParameterizedHostTestClient /// </summary> public AutoRestParameterizedHostTestClient Client { get; private set; } /// <summary> /// Get a 200 to test a valid base uri /// </summary> /// <param name='accountName'> /// Account Name /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse> GetEmptyWithHttpMessagesAsync(string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (accountName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "accountName"); } if (Client.Host == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.Host"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("accountName", accountName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "GetEmpty", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri; var _url = _baseUrl + (_baseUrl.EndsWith("/") ? "" : "/") + "customuri"; _url = _url.Replace("{accountName}", accountName); _url = _url.Replace("{host}", Client.Host); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
// ==++== // // Copyright (c) Microsoft Corporation. All rights reserved. // // ==--== namespace System.Text { using System; using System.Text; ////using System.Runtime.Serialization; // An Encoder is used to encode a sequence of blocks of characters into // a sequence of blocks of bytes. Following instantiation of an encoder, // sequential blocks of characters are converted into blocks of bytes through // calls to the GetBytes method. The encoder maintains state between the // conversions, allowing it to correctly encode character sequences that span // adjacent blocks. // // Instances of specific implementations of the Encoder abstract base // class are typically obtained through calls to the GetEncoder method // of Encoding objects. // [Serializable()] public abstract class Encoder { internal EncoderFallback m_fallback = null; [NonSerialized] internal EncoderFallbackBuffer m_fallbackBuffer = null; //// internal void SerializeEncoder( SerializationInfo info ) //// { //// info.AddValue( "m_fallback", this.m_fallback ); //// } protected Encoder() { // We don't call default reset because default reset probably isn't good if we aren't initialized. } public EncoderFallback Fallback { get { return m_fallback; } set { if(value == null) { #if EXCEPTION_STRINGS throw new ArgumentNullException( "value" ); #else throw new ArgumentNullException(); #endif } // Can't change fallback if buffer is wrong if(m_fallbackBuffer != null && m_fallbackBuffer.Remaining > 0) { #if EXCEPTION_STRINGS throw new ArgumentException( Environment.GetResourceString( "Argument_FallbackBufferNotEmpty" ), "value" ); #else throw new ArgumentException(); #endif } m_fallback = value; m_fallbackBuffer = null; } } // Note: we don't test for threading here because async access to Encoders and Decoders // doesn't work anyway. public EncoderFallbackBuffer FallbackBuffer { get { if(m_fallbackBuffer == null) { if(m_fallback != null) { m_fallbackBuffer = m_fallback.CreateFallbackBuffer(); } else { m_fallbackBuffer = EncoderFallback.ReplacementFallback.CreateFallbackBuffer(); } } return m_fallbackBuffer; } } internal bool InternalHasFallbackBuffer { get { return m_fallbackBuffer != null; } } // Reset the Encoder // // Normally if we call GetBytes() and an error is thrown we don't change the state of the encoder. This // would allow the caller to correct the error condition and try again (such as if they need a bigger buffer.) // // If the caller doesn't want to try again after GetBytes() throws an error, then they need to call Reset(). // // Virtual implimentation has to call GetBytes with flush and a big enough buffer to clear a 0 char string // We avoid GetMaxByteCount() because a) we can't call the base encoder and b) it might be really big. public virtual void Reset() { char[] charTemp = { }; byte[] byteTemp = new byte[GetByteCount( charTemp, 0, 0, true )]; GetBytes( charTemp, 0, 0, byteTemp, 0, true ); if(m_fallbackBuffer != null) { m_fallbackBuffer.Reset(); } } // Returns the number of bytes the next call to GetBytes will // produce if presented with the given range of characters and the given // value of the flush parameter. The returned value takes into // account the state in which the encoder was left following the last call // to GetBytes. The state of the encoder is not affected by a call // to this method. 
// public abstract int GetByteCount( char[] chars , int index , int count , bool flush ); // We expect this to be the workhorse for NLS encodings // unfortunately for existing overrides, it has to call the [] version, // which is really slow, so avoid this method if you might be calling external encodings. [CLSCompliant( false )] public virtual unsafe int GetByteCount( char* chars , int count , bool flush ) { // Validate input parameters if(chars == null) { #if EXCEPTION_STRINGS throw new ArgumentNullException( "chars", Environment.GetResourceString( "ArgumentNull_Array" ) ); #else throw new ArgumentNullException(); #endif } if(count < 0) { #if EXCEPTION_STRINGS throw new ArgumentOutOfRangeException( "count", Environment.GetResourceString( "ArgumentOutOfRange_NeedNonNegNum" ) ); #else throw new ArgumentOutOfRangeException(); #endif } char[] arrChar = new char[count]; int index; for(index = 0; index < count; index++) { arrChar[index] = chars[index]; } return GetByteCount( arrChar, 0, count, flush ); } // Encodes a range of characters in a character array into a range of bytes // in a byte array. The method encodes charCount characters from // chars starting at index charIndex, storing the resulting // bytes in bytes starting at index byteIndex. The encoding // takes into account the state in which the encoder was left following the // last call to this method. The flush parameter indicates whether // the encoder should flush any shift-states and partial characters at the // end of the conversion. To ensure correct termination of a sequence of // blocks of encoded bytes, the last call to GetBytes should specify // a value of true for the flush parameter. // // An exception occurs if the byte array is not large enough to hold the // complete encoding of the characters. The GetByteCount method can // be used to determine the exact number of bytes that will be produced for // a given range of characters. Alternatively, the GetMaxByteCount // method of the Encoding that produced this encoder can be used to // determine the maximum number of bytes that will be produced for a given // number of characters, regardless of the actual character values. // public abstract int GetBytes( char[] chars , int charIndex , int charCount , byte[] bytes , int byteIndex , bool flush ); // We expect this to be the workhorse for NLS Encodings, but for existing // ones we need a working (if slow) default implimentation) // // WARNING WARNING WARNING // // WARNING: If this breaks it could be a security threat. Obviously we // call this internally, so you need to make sure that your pointers, counts // and indexes are correct when you call this method. // // In addition, we have internal code, which will be marked as "safe" calling // this code. However this code is dependent upon the implimentation of an // external GetBytes() method, which could be overridden by a third party and // the results of which cannot be guaranteed. We use that result to copy // the byte[] to our byte* output buffer. If the result count was wrong, we // could easily overflow our output buffer. Therefore we do an extra test // when we copy the buffer so that we don't overflow byteCount either. [CLSCompliant( false )] public virtual unsafe int GetBytes( char* chars , int charCount , byte* bytes , int byteCount , bool flush ) { // Validate input parameters if(bytes == null || chars == null) { #if EXCEPTION_STRINGS throw new ArgumentNullException( bytes == null ? 
"bytes" : "chars", Environment.GetResourceString( "ArgumentNull_Array" ) ); #else throw new ArgumentNullException(); #endif } if(charCount < 0 || byteCount < 0) { #if EXCEPTION_STRINGS throw new ArgumentOutOfRangeException( (charCount < 0 ? "charCount" : "byteCount"), Environment.GetResourceString( "ArgumentOutOfRange_NeedNonNegNum" ) ); #else throw new ArgumentOutOfRangeException(); #endif } // Get the char array to convert char[] arrChar = new char[charCount]; int index; for(index = 0; index < charCount; index++) { arrChar[index] = chars[index]; } // Get the byte array to fill byte[] arrByte = new byte[byteCount]; // Do the work int result = GetBytes( arrChar, 0, charCount, arrByte, 0, flush ); // The only way this could fail is a bug in GetBytes BCLDebug.Assert( result <= byteCount, "Returned more bytes than we have space for" ); // Copy the byte array // WARNING: We MUST make sure that we don't copy too many bytes. We can't // rely on result because it could be a 3rd party implimentation. We need // to make sure we never copy more than byteCount bytes no matter the value // of result if(result < byteCount) { byteCount = result; } // Don't copy too many bytes! for(index = 0; index < byteCount; index++) { bytes[index] = arrByte[index]; } return byteCount; } // This method is used to avoid running out of output buffer space. // It will encode until it runs out of chars, and then it will return // true if it the entire input was converted. In either case it // will also return the number of converted chars and output bytes used. // It will only throw a buffer overflow exception if the entire lenght of bytes[] is // too small to store the next byte. (like 0 or maybe 1 or 4 for some encodings) // We're done processing this buffer only if completed returns true. // // Might consider checking Max...Count to avoid the extra counting step. // // Note that if all of the input chars are not consumed, then we'll do a /2, which means // that its likely that we didn't consume as many chars as we could have. For some // applications this could be slow. (Like trying to exactly fill an output buffer from a bigger stream) public virtual void Convert( char[] chars , int charIndex , int charCount , byte[] bytes , int byteIndex , int byteCount , bool flush , out int charsUsed , out int bytesUsed , out bool completed ) { // Validate parameters if(chars == null || bytes == null) { #if EXCEPTION_STRINGS throw new ArgumentNullException( (chars == null ? "chars" : "bytes"), Environment.GetResourceString( "ArgumentNull_Array" ) ); #else throw new ArgumentNullException(); #endif } if(charIndex < 0 || charCount < 0) { #if EXCEPTION_STRINGS throw new ArgumentOutOfRangeException( (charIndex < 0 ? "charIndex" : "charCount"), Environment.GetResourceString( "ArgumentOutOfRange_NeedNonNegNum" ) ); #else throw new ArgumentOutOfRangeException(); #endif } if(byteIndex < 0 || byteCount < 0) { #if EXCEPTION_STRINGS throw new ArgumentOutOfRangeException( (byteIndex < 0 ? 
"byteIndex" : "byteCount"), Environment.GetResourceString( "ArgumentOutOfRange_NeedNonNegNum" ) ); #else throw new ArgumentOutOfRangeException(); #endif } if(chars.Length - charIndex < charCount) { #if EXCEPTION_STRINGS throw new ArgumentOutOfRangeException( "chars", Environment.GetResourceString( "ArgumentOutOfRange_IndexCountBuffer" ) ); #else throw new ArgumentOutOfRangeException(); #endif } if(bytes.Length - byteIndex < byteCount) { #if EXCEPTION_STRINGS throw new ArgumentOutOfRangeException( "bytes", Environment.GetResourceString( "ArgumentOutOfRange_IndexCountBuffer" ) ); #else throw new ArgumentOutOfRangeException(); #endif } charsUsed = charCount; // Its easy to do if it won't overrun our buffer. // Note: We don't want to call unsafe version because that might be an untrusted version // which could be really unsafe and we don't want to mix it up. while(charsUsed > 0) { if(GetByteCount( chars, charIndex, charsUsed, flush ) <= byteCount) { bytesUsed = GetBytes( chars, charIndex, charsUsed, bytes, byteIndex, flush ); completed = (charsUsed == charCount && (m_fallbackBuffer == null || m_fallbackBuffer.Remaining == 0)); return; } // Try again with 1/2 the count, won't flush then 'cause won't read it all flush = false; charsUsed /= 2; } // Oops, we didn't have anything, we'll have to throw an overflow #if EXCEPTION_STRINGS throw new ArgumentException( Environment.GetResourceString( "Argument_ConversionOverflow" ) ); #else throw new ArgumentException(); #endif } // Same thing, but using pointers // // Might consider checking Max...Count to avoid the extra counting step. // // Note that if all of the input chars are not consumed, then we'll do a /2, which means // that its likely that we didn't consume as many chars as we could have. For some // applications this could be slow. (Like trying to exactly fill an output buffer from a bigger stream) [CLSCompliant( false )] public virtual unsafe void Convert( char* chars , int charCount , byte* bytes , int byteCount , bool flush , out int charsUsed , out int bytesUsed , out bool completed ) { // Validate input parameters if(bytes == null || chars == null) { #if EXCEPTION_STRINGS throw new ArgumentNullException( bytes == null ? "bytes" : "chars", Environment.GetResourceString( "ArgumentNull_Array" ) ); #else throw new ArgumentNullException(); #endif } if(charCount < 0 || byteCount < 0) { #if EXCEPTION_STRINGS throw new ArgumentOutOfRangeException( (charCount < 0 ? "charCount" : "byteCount"), Environment.GetResourceString( "ArgumentOutOfRange_NeedNonNegNum" ) ); #else throw new ArgumentOutOfRangeException(); #endif } // Get ready to do it charsUsed = charCount; // Its easy to do if it won't overrun our buffer. while(charsUsed > 0) { if(GetByteCount( chars, charsUsed, flush ) <= byteCount) { bytesUsed = GetBytes( chars, charsUsed, bytes, byteCount, flush ); completed = (charsUsed == charCount && (m_fallbackBuffer == null || m_fallbackBuffer.Remaining == 0)); return; } // Try again with 1/2 the count, won't flush then 'cause won't read it all flush = false; charsUsed /= 2; } // Oops, we didn't have anything, we'll have to throw an overflow #if EXCEPTION_STRINGS throw new ArgumentException( Environment.GetResourceString( "Argument_ConversionOverflow" ) ); #else throw new ArgumentException(); #endif } } }
using System.Diagnostics; namespace FileHelpers.WizardApp { public class frmLastVersion : frmFather { private System.Windows.Forms.TextBox txtHistory; private System.Windows.Forms.TextBox txtDescription; private System.Windows.Forms.Label label1; private System.Windows.Forms.Label label2; private System.Windows.Forms.Label label3; private System.Windows.Forms.Label label4; private System.Windows.Forms.Button cmdDownload; private System.Windows.Forms.Label lblVersion; private System.Windows.Forms.Label lblDate; private System.ComponentModel.IContainer components = null; private frmLastVersion() { // This call is required by the Windows Form Designer. InitializeComponent(); } public frmLastVersion(VersionData version) : this() { mLastVersion = version; } /// <summary> /// Clean up any resources being used. /// </summary> protected override void Dispose(bool disposing) { if (disposing) { if (components != null) components.Dispose(); } base.Dispose(disposing); } #region Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(frmLastVersion)); this.txtHistory = new System.Windows.Forms.TextBox(); this.txtDescription = new System.Windows.Forms.TextBox(); this.label1 = new System.Windows.Forms.Label(); this.label2 = new System.Windows.Forms.Label(); this.label3 = new System.Windows.Forms.Label(); this.label4 = new System.Windows.Forms.Label(); this.lblVersion = new System.Windows.Forms.Label(); this.lblDate = new System.Windows.Forms.Label(); this.cmdDownload = new System.Windows.Forms.Button(); this.SuspendLayout(); // // txtHistory // this.txtHistory.BackColor = System.Drawing.Color.Ivory; this.txtHistory.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.txtHistory.Location = new System.Drawing.Point(123, 192); this.txtHistory.Multiline = true; this.txtHistory.Name = "txtHistory"; this.txtHistory.ReadOnly = true; this.txtHistory.ScrollBars = System.Windows.Forms.ScrollBars.Vertical; this.txtHistory.Size = new System.Drawing.Size(469, 184); this.txtHistory.TabIndex = 4; // // txtDescription // this.txtDescription.BackColor = System.Drawing.Color.Ivory; this.txtDescription.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.txtDescription.Location = new System.Drawing.Point(123, 112); this.txtDescription.Multiline = true; this.txtDescription.Name = "txtDescription"; this.txtDescription.ReadOnly = true; this.txtDescription.ScrollBars = System.Windows.Forms.ScrollBars.Vertical; this.txtDescription.Size = new System.Drawing.Size(469, 72); this.txtDescription.TabIndex = 5; // // label1 // this.label1.BackColor = System.Drawing.Color.Transparent; this.label1.Font = new System.Drawing.Font("Tahoma", 9.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.label1.ForeColor = System.Drawing.Color.Gainsboro; this.label1.Location = new System.Drawing.Point(11, 112); this.label1.Name = "label1"; this.label1.Size = new System.Drawing.Size(100, 16); this.label1.TabIndex = 6; this.label1.Text = "Description"; this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleRight; // // label2 // this.label2.BackColor = System.Drawing.Color.Transparent; 
this.label2.Font = new System.Drawing.Font("Tahoma", 9.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.label2.ForeColor = System.Drawing.Color.Gainsboro; this.label2.Location = new System.Drawing.Point(16, 192); this.label2.Name = "label2"; this.label2.Size = new System.Drawing.Size(100, 16); this.label2.TabIndex = 7; this.label2.Text = "History"; this.label2.TextAlign = System.Drawing.ContentAlignment.MiddleRight; // // label3 // this.label3.BackColor = System.Drawing.Color.Transparent; this.label3.Font = new System.Drawing.Font("Tahoma", 9.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.label3.ForeColor = System.Drawing.Color.Gainsboro; this.label3.Location = new System.Drawing.Point(11, 64); this.label3.Name = "label3"; this.label3.Size = new System.Drawing.Size(100, 16); this.label3.TabIndex = 8; this.label3.Text = "Version"; this.label3.TextAlign = System.Drawing.ContentAlignment.MiddleRight; // // label4 // this.label4.BackColor = System.Drawing.Color.Transparent; this.label4.Font = new System.Drawing.Font("Tahoma", 9.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.label4.ForeColor = System.Drawing.Color.Gainsboro; this.label4.Location = new System.Drawing.Point(11, 88); this.label4.Name = "label4"; this.label4.Size = new System.Drawing.Size(100, 16); this.label4.TabIndex = 9; this.label4.Text = "Release Date"; this.label4.TextAlign = System.Drawing.ContentAlignment.MiddleRight; // // lblVersion // this.lblVersion.BackColor = System.Drawing.Color.Transparent; this.lblVersion.Font = new System.Drawing.Font("Tahoma", 12F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.lblVersion.ForeColor = System.Drawing.Color.White; this.lblVersion.Location = new System.Drawing.Point(119, 64); this.lblVersion.Name = "lblVersion"; this.lblVersion.Size = new System.Drawing.Size(100, 16); this.lblVersion.TabIndex = 10; this.lblVersion.Text = "1.6.0"; this.lblVersion.TextAlign = System.Drawing.ContentAlignment.MiddleLeft; // // lblDate // this.lblDate.BackColor = System.Drawing.Color.Transparent; this.lblDate.Font = new System.Drawing.Font("Tahoma", 12F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.lblDate.ForeColor = System.Drawing.Color.White; this.lblDate.Location = new System.Drawing.Point(119, 88); this.lblDate.Name = "lblDate"; this.lblDate.Size = new System.Drawing.Size(140, 21); this.lblDate.TabIndex = 11; this.lblDate.Text = "12/Jun/07"; this.lblDate.TextAlign = System.Drawing.ContentAlignment.MiddleLeft; // // cmdDownload // this.cmdDownload.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(0)))), ((int)(((byte)(0)))), ((int)(((byte)(110))))); this.cmdDownload.FlatStyle = System.Windows.Forms.FlatStyle.Flat; this.cmdDownload.Font = new System.Drawing.Font("Tahoma", 11.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.cmdDownload.ForeColor = System.Drawing.Color.Gainsboro; this.cmdDownload.Image = ((System.Drawing.Image)(resources.GetObject("cmdDownload.Image"))); this.cmdDownload.ImageAlign = System.Drawing.ContentAlignment.MiddleRight; this.cmdDownload.Location = new System.Drawing.Point(123, 382); this.cmdDownload.Name = "cmdDownload"; this.cmdDownload.Size = new System.Drawing.Size(224, 40); this.cmdDownload.TabIndex = 12; this.cmdDownload.Text = "Open Download Page"; this.cmdDownload.UseVisualStyleBackColor = false; this.cmdDownload.Click += 
new System.EventHandler(this.cmdDownload_Click); // // frmLastVersion // this.AutoScaleBaseSize = new System.Drawing.Size(5, 14); this.ClientSize = new System.Drawing.Size(610, 461); this.Controls.Add(this.cmdDownload); this.Controls.Add(this.lblDate); this.Controls.Add(this.lblVersion); this.Controls.Add(this.label4); this.Controls.Add(this.label3); this.Controls.Add(this.label2); this.Controls.Add(this.label1); this.Controls.Add(this.txtDescription); this.Controls.Add(this.txtHistory); this.Name = "frmLastVersion"; this.Text = "FileHelpers - New Version Info"; this.Load += new System.EventHandler(this.frmLastVersion_Load); this.Controls.SetChildIndex(this.txtHistory, 0); this.Controls.SetChildIndex(this.txtDescription, 0); this.Controls.SetChildIndex(this.label1, 0); this.Controls.SetChildIndex(this.label2, 0); this.Controls.SetChildIndex(this.label3, 0); this.Controls.SetChildIndex(this.label4, 0); this.Controls.SetChildIndex(this.lblVersion, 0); this.Controls.SetChildIndex(this.lblDate, 0); this.Controls.SetChildIndex(this.cmdDownload, 0); this.ResumeLayout(false); this.PerformLayout(); } #endregion public VersionData mLastVersion; private void cmdDownload_Click(object sender, System.EventArgs e) { Process.Start("explorer", "\"" + mLastVersion.DownloadUrl + "\""); } private void frmLastVersion_Load(object sender, System.EventArgs e) { txtDescription.Text = mLastVersion.Description; txtHistory.Text = mLastVersion.History; lblVersion.Text = mLastVersion.Version; lblDate.Text = mLastVersion.ReleaseDate.ToString("dd-MMM-yyyy"); } } }
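// Illustrative sketch (an assumption, not the FileHelpers source): the form above reads only
// five members of its VersionData argument, so a minimal shape that would satisfy
// frmLastVersion_Load and cmdDownload_Click is outlined below. The property types are inferred
// from how the form uses them (ReleaseDate is formatted with "dd-MMM-yyyy"); the real
// FileHelpers.WizardApp.VersionData type may define more or differ.
using System;

public class VersionDataShapeSketch
{
    public string Version { get; set; }        // shown in lblVersion
    public DateTime ReleaseDate { get; set; }  // shown in lblDate, formatted as "dd-MMM-yyyy"
    public string Description { get; set; }    // shown in txtDescription
    public string History { get; set; }        // shown in txtHistory
    public string DownloadUrl { get; set; }    // opened by cmdDownload via Process.Start
}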
using System; using UnityEngine; using System.Collections; using System.Collections.Generic; public class FFacetRenderLayer : FRenderableLayerInterface { public int batchIndex; protected FStage _stage; protected FFacetType _facetType; protected FAtlas _atlas; protected FShader _shader; protected GameObject _gameObject; protected Transform _transform; protected Material _material; protected MeshFilter _meshFilter; protected MeshRenderer _meshRenderer; protected Mesh _mesh; //Mesh stuff protected Vector3[] _vertices = new Vector3[0]; protected int[] _triangles = new int[0]; protected Vector2[] _uvs = new Vector2[0]; protected Color[] _colors = new Color[0]; protected bool _isMeshDirty = false; protected bool _didVertsChange = false; protected bool _didUVsChange = false; protected bool _didColorsChange = false; protected bool _didVertCountChange = false; protected bool _doesMeshNeedClear = false; protected bool _shouldUpdateBounds = false; protected int _expansionAmount; protected int _maxEmptyFacets; protected int _maxFacetCount = 0; protected int _depth = -1; protected int _nextAvailableFacetIndex; protected int _lowestZeroIndex = 0; protected bool _needsRecalculateBoundsIfTransformed = false; public FFacetRenderLayer (FStage stage, FFacetType facetType, FAtlas atlas, FShader shader) { _stage = stage; _facetType = facetType; _atlas = atlas; _shader = shader; _expansionAmount = _facetType.expansionAmount; _maxEmptyFacets = _facetType.maxEmptyAmount; batchIndex = _facetType.index*10000000 + atlas.index*10000 + shader.index; _gameObject = new GameObject("FRenderLayer ("+_stage.name+") ("+_facetType.name+")"); _transform = _gameObject.transform; _transform.parent = Futile.instance.gameObject.transform; _meshFilter = _gameObject.AddComponent<MeshFilter>(); _meshRenderer = _gameObject.AddComponent<MeshRenderer>(); _meshRenderer.castShadows = false; _meshRenderer.receiveShadows = false; _mesh = _meshFilter.mesh; _material = new Material(_shader.shader); _material.mainTexture = _atlas.texture; _meshRenderer.renderer.material = _material; #if UNITY_3_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4 || UNITY_3_5 _gameObject.active = false; #else _gameObject.SetActive(false); _mesh.MarkDynamic(); #endif ExpandMaxFacetLimit(_facetType.initialAmount); UpdateTransform(); } public void Destroy() { UnityEngine.Object.Destroy(_gameObject); } public void UpdateTransform() { _transform.position = _stage.transform.position; _transform.rotation = _stage.transform.rotation; _transform.localScale = _stage.transform.localScale; if(_needsRecalculateBoundsIfTransformed) { _needsRecalculateBoundsIfTransformed = false; _mesh.RecalculateBounds(); } } public void AddToWorld () //add to the transform etc { #if UNITY_3_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4 || UNITY_3_5 _gameObject.active = true; #else _gameObject.SetActive(true); #endif } public void RemoveFromWorld() //remove it from the root transform etc { #if UNITY_3_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4 || UNITY_3_5 _gameObject.active = false; #else _gameObject.SetActive(false); #endif #if UNITY_EDITOR //some debug code so that layers are sorted by depth properly _gameObject.name = "FRenderLayer X ("+_stage.name+") (" + _atlas.name + " " + _shader.name+" "+_facetType.name+ ")"; #endif } public void Open () { _nextAvailableFacetIndex = 0; } public int GetNextFacetIndex (int numberOfFacetsNeeded) { int indexToReturn = _nextAvailableFacetIndex; _nextAvailableFacetIndex += numberOfFacetsNeeded; //expand the layer (if needed) now that 
we know how many facets we need to fit if(_nextAvailableFacetIndex-1 >= _maxFacetCount) { int deltaNeeded = (_nextAvailableFacetIndex - _maxFacetCount) + 1; ExpandMaxFacetLimit(Math.Max (deltaNeeded, _expansionAmount)); //expand it by expansionAmount or the amount needed } return indexToReturn; } public void Close () //fill remaining facets with 0,0,0 { //if we have a ton of empty facets //shrink the facets if(_nextAvailableFacetIndex < _maxFacetCount-_maxEmptyFacets) { ShrinkMaxFacetLimit(Math.Max (0,(_maxFacetCount-_nextAvailableFacetIndex)-_expansionAmount)); } FillUnusedFacetsWithZeroes(); #if UNITY_EDITOR //some debug code so that layers are sorted by depth properly _gameObject.name = "FRenderLayer "+_depth+" ("+_stage.name+") ["+_nextAvailableFacetIndex+"/"+_maxFacetCount+"] (" + _atlas.name + " " + _shader.name+" "+_facetType.name+ ")"; #endif } virtual protected void FillUnusedFacetsWithZeroes () { throw new NotImplementedException("Override me!"); } virtual protected void ShrinkMaxFacetLimit(int deltaDecrease) { throw new NotImplementedException("Override me!"); } virtual protected void ExpandMaxFacetLimit(int deltaIncrease) { throw new NotImplementedException("Override me!"); } //ACTUAL RENDERING GOES HERE public void Update(int depth) //called by the engine { if(_depth != depth) { _depth = depth; //this will set the render order correctly based on the depth _material.renderQueue = Futile.baseRenderQueueDepth+_depth; #if UNITY_EDITOR //some debug code so that layers are sorted by depth properly _gameObject.name = "FRenderLayer "+_depth+" ("+_stage.name+") ["+_nextAvailableFacetIndex+"/"+_maxFacetCount+"] (" + _atlas.name + " " + _shader.name+" "+_facetType.name+ ")"; #endif } if(_isMeshDirty) { UpdateMeshProperties(); } } protected void UpdateMeshProperties() { _isMeshDirty = false; // Were changes made to the mesh since last time? 
if (_didVertCountChange) { _didVertCountChange = false; _didColorsChange = false; _didVertsChange = false; _didUVsChange = false; _shouldUpdateBounds = false; //in theory we shouldn't need clear because we KNOW everything is correct //see http://docs.unity3d.com/Documentation/ScriptReference/Mesh.html if(_doesMeshNeedClear) _mesh.Clear(); _mesh.vertices = _vertices; _mesh.triangles = _triangles; _mesh.uv = _uvs; //TODO: switch to using colors32 at some point for performance //see http://docs.unity3d.com/Documentation/ScriptReference/Mesh-colors32.html _mesh.colors = _colors; } else { if (_didVertsChange) { _didVertsChange = false; _shouldUpdateBounds = true; _mesh.vertices = _vertices; } if (_shouldUpdateBounds) { //Taking this out because it seems heavy, and I don't think there are benefits //http://docs.unity3d.com/Documentation/ScriptReference/Mesh.RecalculateBounds.html //Ok nevermind, I put it back in for now because if you scroll the stage, it's needed _needsRecalculateBoundsIfTransformed = true; _shouldUpdateBounds = false; } if (_didColorsChange) { _didColorsChange = false; _mesh.colors = _colors; } if (_didUVsChange) { _didUVsChange = false; _mesh.uv = _uvs; } } } public void HandleVertsChange() { _didVertsChange = true; _didUVsChange = true; _didColorsChange = true; _isMeshDirty = true; } public int expansionAmount { set {_expansionAmount = value;} get {return _expansionAmount;} } public Vector3[] vertices { get {return _vertices;} } public Vector2[] uvs { get {return _uvs;} } public Color[] colors { get {return _colors;} } } public class FQuadRenderLayer : FFacetRenderLayer { public FQuadRenderLayer (FStage stage, FFacetType facetType, FAtlas atlas, FShader shader) : base (stage,facetType,atlas,shader) { } override protected void FillUnusedFacetsWithZeroes () { _lowestZeroIndex = Math.Max (_nextAvailableFacetIndex, Math.Min (_maxFacetCount,_lowestZeroIndex)); for(int z = _nextAvailableFacetIndex; z<_lowestZeroIndex; z++) { int vertexIndex = z*4; //the high 1000000 Z should make them get culled and not rendered because they're behind the camera //need x to be 50 so they're "in screen" and not getting culled outside the bounds //because once something is marked outside the bounds, it won't get rendered until the next mesh.Clear() //TODO: test if the high z actually gives better performance or not _vertices[vertexIndex + 0].Set(50,0,1000000); _vertices[vertexIndex + 1].Set(50,0,1000000); _vertices[vertexIndex + 2].Set(50,0,1000000); _vertices[vertexIndex + 3].Set(50,0,1000000); } _lowestZeroIndex = _nextAvailableFacetIndex; } override protected void ShrinkMaxFacetLimit(int deltaDecrease) { if(deltaDecrease <= 0) return; _maxFacetCount = Math.Max (_facetType.initialAmount, _maxFacetCount-deltaDecrease); //shrink the arrays Array.Resize (ref _vertices,_maxFacetCount*4); Array.Resize (ref _uvs,_maxFacetCount*4); Array.Resize (ref _colors,_maxFacetCount*4); Array.Resize (ref _triangles,_maxFacetCount*6); _didVertCountChange = true; _didVertsChange = true; _didUVsChange = true; _didColorsChange = true; _isMeshDirty = true; _doesMeshNeedClear = true; //we only need clear when shrinking the mesh size } override protected void ExpandMaxFacetLimit(int deltaIncrease) { if(deltaIncrease <= 0) return; int firstNewFacetIndex = _maxFacetCount; _maxFacetCount += deltaIncrease; //expand the arrays Array.Resize (ref _vertices,_maxFacetCount*4); Array.Resize (ref _uvs,_maxFacetCount*4); Array.Resize (ref _colors,_maxFacetCount*4); Array.Resize (ref _triangles,_maxFacetCount*6); //fill the triangles 
with the correct values for(int i = firstNewFacetIndex; i<_maxFacetCount; ++i) { _triangles[i*6 + 0] = i * 4 + 0; _triangles[i*6 + 1] = i * 4 + 1; _triangles[i*6 + 2] = i * 4 + 2; _triangles[i*6 + 3] = i * 4 + 0; _triangles[i*6 + 4] = i * 4 + 2; _triangles[i*6 + 5] = i * 4 + 3; } _didVertCountChange = true; _didVertsChange = true; _didUVsChange = true; _didColorsChange = true; _isMeshDirty = true; } } public class FTriangleRenderLayer : FFacetRenderLayer { public FTriangleRenderLayer (FStage stage, FFacetType facetType, FAtlas atlas, FShader shader) : base (stage,facetType,atlas,shader) { } override protected void FillUnusedFacetsWithZeroes () { _lowestZeroIndex = Math.Max (_nextAvailableFacetIndex, Math.Min (_maxFacetCount,_lowestZeroIndex)); //Debug.Log ("FILLING FROM " + _nextAvailableFacetIndex + " to " + _lowestZeroIndex + " with zeroes!"); for(int z = _nextAvailableFacetIndex; z<_lowestZeroIndex; z++) { int vertexIndex = z*3; //the high 1000000 Z should make them get culled and not rendered because they're behind the camera //need x to be 50 so they're "in screen" and not getting culled outside the bounds //because once something is marked outside the bounds, it won't get rendered until the next mesh.Clear() //TODO: test if the high z actually gives better performance or not _vertices[vertexIndex + 0].Set(50,0,1000000); _vertices[vertexIndex + 1].Set(50,0,1000000); _vertices[vertexIndex + 2].Set(50,0,1000000); } _lowestZeroIndex = _nextAvailableFacetIndex; } override protected void ShrinkMaxFacetLimit(int deltaDecrease) { if(deltaDecrease <= 0) return; _maxFacetCount = Math.Max (_facetType.initialAmount, _maxFacetCount-deltaDecrease); //shrink the arrays Array.Resize (ref _vertices,_maxFacetCount*3); Array.Resize (ref _uvs,_maxFacetCount*3); Array.Resize (ref _colors,_maxFacetCount*3); Array.Resize (ref _triangles,_maxFacetCount*3); _didVertCountChange = true; _didVertsChange = true; _didUVsChange = true; _didColorsChange = true; _isMeshDirty = true; _doesMeshNeedClear = true; //we only need clear when shrinking the mesh size } override protected void ExpandMaxFacetLimit(int deltaIncrease) { if(deltaIncrease <= 0) return; int firstNewFacetIndex = _maxFacetCount; _maxFacetCount += deltaIncrease; //expand the arrays Array.Resize (ref _vertices,_maxFacetCount*3); Array.Resize (ref _uvs,_maxFacetCount*3); Array.Resize (ref _colors,_maxFacetCount*3); Array.Resize (ref _triangles,_maxFacetCount*3); //fill the triangles with the correct values for(int i = firstNewFacetIndex; i<_maxFacetCount; ++i) { int threei = i*3; _triangles[threei] = threei; _triangles[threei + 1] = threei + 1; _triangles[threei + 2] = threei + 2; } _didVertCountChange = true; _didVertsChange = true; _didUVsChange = true; _didColorsChange = true; _isMeshDirty = true; } }
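
// A minimal standalone sketch (not part of Futile) of the quad-to-triangle index layout that
// FQuadRenderLayer.ExpandMaxFacetLimit fills in above: quad i owns vertices [i*4 .. i*4+3] and
// six triangle indices [i*6 .. i*6+5], forming the two triangles (0,1,2) and (0,2,3).
// The class and method names below are hypothetical and for illustration only.
using System;

public static class QuadIndexLayoutSketch
{
    // Builds the triangle index array for quadCount quads, mirroring the fill loop above.
    public static int[] BuildQuadTriangles(int quadCount)
    {
        int[] triangles = new int[quadCount * 6];
        for (int i = 0; i < quadCount; i++)
        {
            triangles[i * 6 + 0] = i * 4 + 0;
            triangles[i * 6 + 1] = i * 4 + 1;
            triangles[i * 6 + 2] = i * 4 + 2;
            triangles[i * 6 + 3] = i * 4 + 0;
            triangles[i * 6 + 4] = i * 4 + 2;
            triangles[i * 6 + 5] = i * 4 + 3;
        }
        return triangles;
    }

    public static void Main()
    {
        // For two quads this prints: 0 1 2 0 2 3 4 5 6 4 6 7
        Console.WriteLine(string.Join(" ", BuildQuadTriangles(2)));
    }
}
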
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.ServiceModel;
using System.ServiceModel.Channels;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Infrastructure.Common;
using Xunit;

public class StreamingTests : ConditionalWcfTest
{
    [WcfFact]
    [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))]
    [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native
    [OuterLoop]
    public static void NetTcp_TransportSecurity_StreamedRequest_RoundTrips_String()
    {
        string testString = "Hello";
        NetTcpBinding binding = null;
        ChannelFactory<IWcfService> factory = null;
        IWcfService serviceProxy = null;
        Stream stream = null;

        try
        {
            // *** SETUP *** \\
            binding = new NetTcpBinding(SecurityMode.Transport);
            binding.TransferMode = TransferMode.StreamedRequest;
            factory = new ChannelFactory<IWcfService>(binding, new EndpointAddress(Endpoints.Tcp_Transport_Security_Streamed_Address));
            serviceProxy = factory.CreateChannel();
            stream = StringToStream(testString);

            // *** EXECUTE *** \\
            var result = serviceProxy.GetStringFromStream(stream);

            // *** VALIDATE *** \\
            Assert.Equal(testString, result);

            // *** CLEANUP *** \\
            ((ICommunicationObject)serviceProxy).Close();
            factory.Close();
        }
        finally
        {
            // *** ENSURE CLEANUP *** \\
            ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory);
        }
    }

    [WcfFact]
    [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))]
    [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native
    [OuterLoop]
    public static void NetTcp_TransportSecurity_StreamedResponse_RoundTrips_String()
    {
        string testString = "Hello";
        NetTcpBinding binding = null;
        ChannelFactory<IWcfService> factory = null;
        IWcfService serviceProxy = null;

        try
        {
            // *** SETUP *** \\
            binding = new NetTcpBinding(SecurityMode.Transport);
            binding.TransferMode = TransferMode.StreamedResponse;
            factory = new ChannelFactory<IWcfService>(binding, new EndpointAddress(Endpoints.Tcp_Transport_Security_Streamed_Address));
            serviceProxy = factory.CreateChannel();

            // *** EXECUTE *** \\
            var returnStream = serviceProxy.GetStreamFromString(testString);
            var result = StreamToString(returnStream);

            // *** VALIDATE *** \\
            Assert.Equal(testString, result);

            // *** CLEANUP *** \\
            ((ICommunicationObject)serviceProxy).Close();
            factory.Close();
        }
        finally
        {
            // *** ENSURE CLEANUP *** \\
            ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory);
        }
    }

    [WcfFact]
    [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))]
    [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native
    [OuterLoop]
    public static void NetTcp_TransportSecurity_Streamed_RoundTrips_String()
    {
        string testString = "Hello";
        NetTcpBinding binding = null;
        ChannelFactory<IWcfService> factory = null;
        IWcfService serviceProxy = null;
        Stream stream = null;

        try
        {
            // *** SETUP *** \\
            binding = new
NetTcpBinding(SecurityMode.Transport); binding.TransferMode = TransferMode.Streamed; factory = new ChannelFactory<IWcfService>(binding, new EndpointAddress(Endpoints.Tcp_Transport_Security_Streamed_Address)); serviceProxy = factory.CreateChannel(); stream = StringToStream(testString); // *** EXECUTE *** \\ var returnStream = serviceProxy.EchoStream(stream); var result = StreamToString(returnStream); // *** VALIDATE *** \\ Assert.Equal(testString, result); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); factory.Close(); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } [WcfFact] [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))] [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native [OuterLoop] public static void NetTcp_TransportSecurity_Streamed_MultipleReads() { string testString = ScenarioTestHelpers.CreateInterestingString(20001); NetTcpBinding binding = null; ChannelFactory<IWcfService> factory = null; IWcfService serviceProxy = null; Stream stream = null; try { // *** SETUP *** \\ binding = new NetTcpBinding(SecurityMode.Transport); binding.TransferMode = TransferMode.Streamed; factory = new ChannelFactory<IWcfService>(binding, new EndpointAddress(Endpoints.Tcp_Transport_Security_Streamed_Address)); serviceProxy = factory.CreateChannel(); stream = StringToStream(testString); // *** EXECUTE *** \\ var returnStream = serviceProxy.EchoStream(stream); var ms = new MemoryStream((int)stream.Length); var buffer = new byte[10]; int bytesRead = 0; while ((bytesRead = returnStream.ReadAsync(buffer, 0, buffer.Length).Result) != 0) { ms.Write(buffer, 0, bytesRead); } ms.Position = 0; var result = StreamToString(ms); // *** VALIDATE *** \\ Assert.Equal(testString, result); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); factory.Close(); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } [WcfFact] [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))] [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native [OuterLoop] public static void NetTcp_TransportSecurity_Streamed_TimeOut_Long_Running_Operation() { string testString = "Hello"; NetTcpBinding binding = null; TimeSpan serviceOperationTimeout = TimeSpan.FromMilliseconds(10000); ChannelFactory<IWcfService> factory = null; IWcfService serviceProxy = null; try { // *** SETUP *** \\ binding = new NetTcpBinding(SecurityMode.Transport); binding.TransferMode = TransferMode.Streamed; binding.SendTimeout = TimeSpan.FromMilliseconds(5000); factory = new ChannelFactory<IWcfService>(binding, new EndpointAddress(Endpoints.Tcp_Transport_Security_Streamed_Address)); serviceProxy = factory.CreateChannel(); Stopwatch watch = new Stopwatch(); watch.Start(); // *** EXECUTE *** \\ try { Assert.Throws<TimeoutException>(() => { string returnString = serviceProxy.EchoWithTimeout(testString, serviceOperationTimeout); }); } finally { watch.Stop(); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); factory.Close(); } // *** VALIDATE *** \\ // want to assert that this completed in > 5 s as an upper bound since the 
SendTimeout is 5 sec // (usual case is around 5001-5005 ms) Assert.True(watch.ElapsedMilliseconds >= 4985 && watch.ElapsedMilliseconds < 6000, String.Format("Expected timeout was {0}ms but actual was {1}ms", serviceOperationTimeout.TotalMilliseconds, watch.ElapsedMilliseconds)); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } [WcfFact] [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))] [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native [OuterLoop] public static void NetTcp_TransportSecurity_Streamed_Async_RoundTrips_String() { string testString = "Hello"; StringBuilder errorBuilder = new StringBuilder(); NetTcpBinding binding = null; ChannelFactory<IWcfService> factory = null; IWcfService serviceProxy = null; Stream stream = null; try { // *** SETUP *** \\ binding = new NetTcpBinding(SecurityMode.Transport); binding.TransferMode = TransferMode.Streamed; factory = new ChannelFactory<IWcfService>(binding, new EndpointAddress(Endpoints.Tcp_Transport_Security_Streamed_Address)); serviceProxy = factory.CreateChannel(); stream = StringToStream(testString); // *** EXECUTE *** \\ var returnStream = serviceProxy.EchoStreamAsync(stream).Result; var result = StreamToString(returnStream); // *** VALIDATE *** \\ Assert.Equal(testString, result); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); factory.Close(); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } [WcfFact] [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))] [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native [OuterLoop] public static void NetTcp_TransportSecurity_StreamedRequest_Async_RoundTrips_String() { string testString = "Hello"; StringBuilder errorBuilder = new StringBuilder(); NetTcpBinding binding = null; ChannelFactory<IWcfService> factory = null; IWcfService serviceProxy = null; Stream stream = null; try { // *** SETUP *** \\ binding = new NetTcpBinding(SecurityMode.Transport); binding.TransferMode = TransferMode.StreamedRequest; factory = new ChannelFactory<IWcfService>(binding, new EndpointAddress(Endpoints.Tcp_Transport_Security_Streamed_Address)); serviceProxy = factory.CreateChannel(); stream = StringToStream(testString); // *** EXECUTE *** \\ var returnStream = serviceProxy.EchoStreamAsync(stream).Result; var result = StreamToString(returnStream); // *** VALIDATE *** \\ Assert.Equal(testString, result); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); factory.Close(); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } [WcfFact] [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))] [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native [OuterLoop] public static void NetTcp_TransportSecurity_StreamedResponse_Async_RoundTrips_String() { string testString = "Hello"; StringBuilder errorBuilder = new StringBuilder(); NetTcpBinding binding = 
null; ChannelFactory<IWcfService> factory = null; IWcfService serviceProxy = null; Stream stream = null; try { // *** SETUP *** \\ binding = new NetTcpBinding(SecurityMode.Transport); binding.TransferMode = TransferMode.StreamedResponse; factory = new ChannelFactory<IWcfService>(binding, new EndpointAddress(Endpoints.Tcp_Transport_Security_Streamed_Address)); serviceProxy = factory.CreateChannel(); stream = StringToStream(testString); // *** EXECUTE *** \\ var returnStream = serviceProxy.EchoStreamAsync(stream).Result; var result = StreamToString(returnStream); // *** VALIDATE *** \\ Assert.Equal(testString, result); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); factory.Close(); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } [WcfFact] [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))] [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native [OuterLoop] public static void NetTcp_TransportSecurity_Streamed_RoundTrips_String_WithSingleThreadedSyncContext() { bool success = Task.Run(() => { TestTypes.SingleThreadSynchronizationContext.Run(() => { Task.Factory.StartNew(() => StreamingTests.NetTcp_TransportSecurity_Streamed_RoundTrips_String(), CancellationToken.None, TaskCreationOptions.None, TaskScheduler.FromCurrentSynchronizationContext()).Wait(); }); }).Wait(ScenarioTestHelpers.TestTimeout); Assert.True(success, "Test Scenario: NetTcp_TransportSecurity_String_Streamed_RoundTrips_WithSingleThreadedSyncContext timed-out."); } [WcfFact] [Condition(nameof(Root_Certificate_Installed), nameof(Client_Certificate_Installed), nameof(Windows_Authentication_Available), nameof(Ambient_Credentials_Available))] [Issue(832, Framework = FrameworkID.NetNative)] // Windows Stream Security is not supported in NET Native [OuterLoop] public static void NetTcp_TransportSecurity_Streamed_Async_RoundTrips_String_WithSingleThreadedSyncContext() { bool success = Task.Run(() => { TestTypes.SingleThreadSynchronizationContext.Run(() => { Task.Factory.StartNew(() => StreamingTests.NetTcp_TransportSecurity_Streamed_Async_RoundTrips_String(), CancellationToken.None, TaskCreationOptions.None, TaskScheduler.FromCurrentSynchronizationContext()).Wait(); }); }).Wait(ScenarioTestHelpers.TestTimeout); Assert.True(success, "Test Scenario: NetTcp_TransportSecurity_Streamed_Async_RoundTrips_String_WithSingleThreadedSyncContext timed-out."); } private static void PrintInnerExceptionsHresult(Exception e, StringBuilder errorBuilder) { if (e.InnerException != null) { errorBuilder.AppendLine(string.Format("\r\n InnerException type: '{0}', Hresult:'{1}'", e.InnerException, e.InnerException.HResult)); PrintInnerExceptionsHresult(e.InnerException, errorBuilder); } } private static string StreamToString(Stream stream) { var reader = new StreamReader(stream, Encoding.UTF8); return reader.ReadToEnd(); } private static Stream StringToStream(string str) { var ms = new MemoryStream(); var sw = new StreamWriter(ms, Encoding.UTF8); sw.Write(str); sw.Flush(); ms.Position = 0; return ms; } }
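
// A minimal standalone sketch (no WCF involved) of the chunked-read pattern used by
// NetTcp_TransportSecurity_Streamed_MultipleReads above: keep reading small buffers until the
// stream returns 0 bytes, accumulate them in a MemoryStream, then decode the result. The test
// above uses ReadAsync(...).Result; this sketch uses synchronous Read for brevity, and the
// class and method names are hypothetical.
using System;
using System.IO;
using System.Text;

public static class ChunkedReadSketch
{
    // Drains the source stream in 10-byte chunks and returns its contents as a UTF-8 string.
    public static string ReadAllInChunks(Stream source)
    {
        var ms = new MemoryStream();
        var buffer = new byte[10];
        int bytesRead;
        while ((bytesRead = source.Read(buffer, 0, buffer.Length)) != 0)
        {
            ms.Write(buffer, 0, bytesRead);
        }
        return Encoding.UTF8.GetString(ms.ToArray());
    }

    public static void Main()
    {
        using (var source = new MemoryStream(Encoding.UTF8.GetBytes("Hello, streamed world")))
        {
            Console.WriteLine(ReadAllInChunks(source)); // prints the original string
        }
    }
}
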
// Copyright 2016 Mark Raasveldt // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Drawing.Imaging; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; using System.Diagnostics; namespace Tibialyzer { public partial class LootDropForm : NotificationForm { public List<Tuple<Item, int>> items; public Dictionary<Creature, int> creatures; public Dictionary<Item, List<PictureBox>> itemControls = new Dictionary<Item, List<PictureBox>>(); public Dictionary<Creature, Tuple<PictureBox, Label>> creatureControls = new Dictionary<Creature, Tuple<PictureBox, Label>>(); public Creature lootCreature; public Hunt hunt; public int initialPage = 0; public int page = 0; public const int pageHeight = 400; public const int maxCreatureHeight = 700; public const int minLootWidth = 203; private string huntName = ""; private string creatureName = ""; public string rawName = ""; private long averageGold = 0; private object updateLock = new object(); private static System.Timers.Timer updateTimer; ToolTip value_tooltip = new ToolTip(); public static Font loot_font = new Font(FontFamily.GenericSansSerif, 14, FontStyle.Bold); public LootDropForm(string command) { string[] split = command.Split('@'); if (split.Length >= 2) { huntName = split[1]; } if (split.Length >= 3) { creatureName = split[2]; } if (split.Length >= 4) { rawName = split[3]; } lootCreature = StorageManager.getCreature(creatureName); InitializeComponent(); value_tooltip.AutoPopDelay = 60000; value_tooltip.InitialDelay = 500; value_tooltip.ReshowDelay = 0; value_tooltip.ShowAlways = true; value_tooltip.UseFading = true; this.Name = "Tibialyzer (Loot Form)"; updateTimer = new System.Timers.Timer(500); updateTimer.AutoReset = false; updateTimer.Elapsed += (s, e) => { ActuallyRefreshForm(); }; } private void ActuallyRefreshForm() { lock (updateLock) { if (this.IsDisposed) return; try { updateTimer.Stop(); updateTimer.Enabled = false; this.Invoke((MethodInvoker)delegate { UpdateLootInternal(); }); } catch { } } } public static Bitmap GetStackImage(Image image, int count, Item item) { if (image == null) return new Bitmap(item.image); lock(image) { int max = image.GetFrameCount(FrameDimension.Time); int index = 0; if (count <= 5) index = count - 1; else if (count <= 10) index = 5; else if (count <= 25) index = 6; else if (count <= 50) index = 7; else index = 8; if (index >= max) index = max - 1; image.SelectActiveFrame(FrameDimension.Time, index); return new Bitmap((Image)image.Clone()); } } public static void DrawCountOnGraphics(Graphics gr, int itemCount, int offset_x, int offset_y) { int numbers = (int)Math.Floor(Math.Log(itemCount, 10)) + 1; int xoffset = 1, logamount = itemCount; for (int i = 0; i < numbers; i++) { int imagenr = logamount % 10; Image imageNumber = StyleManager.GetImage(imagenr + ".png"); xoffset = xoffset + imageNumber.Width + (itemCount >= 1000 ? 
0 : 1); lock (imageNumber) { gr.DrawImage(imageNumber, new Point(offset_x - xoffset, offset_y - imageNumber.Height - 3)); } logamount /= 10; } } public static Bitmap DrawCountOnItem(Item item, int itemCount, int size = -1) { Bitmap image; if (item.stackable) { try { image = new Bitmap(LootDropForm.GetStackImage(item.image, itemCount, item)); } catch { image = new Bitmap(item.image); } } else { image = new Bitmap(item.image); } using (Graphics gr = Graphics.FromImage(image)) { DrawCountOnGraphics(gr, itemCount, image.Width, image.Height); } return image; } public static Tuple<Dictionary<Creature, int>, List<Tuple<Item, int>>> GenerateLootInformation(Hunt hunt, string rawName, Creature lootCreature) { Dictionary<Creature, int> creatureKills; List<Tuple<Item, int>> itemDrops = new List<Tuple<Item, int>>(); bool raw = rawName == "raw"; bool all = raw || rawName == "all"; List<Creature> displayedCreatures = null; if (!hunt.trackAllCreatures && hunt.trackedCreatures.Length > 0) { displayedCreatures = hunt.GetTrackedCreatures(); } else if (SettingsManager.getSettingBool("IgnoreLowExperience")) { displayedCreatures = new List<Creature>(); foreach (Creature cr in hunt.IterateCreatures()) { if (cr.experience >= SettingsManager.getSettingInt("IgnoreLowExperienceValue")) { displayedCreatures.Add(cr); } } } if (lootCreature != null) { //the command is loot@<creature>, so we only display the kills and loot from the specified creature creatureKills = hunt.GetCreatureKills(lootCreature); } else if (displayedCreatures == null) { creatureKills = hunt.GetCreatureKills(); //display all creatures //loot.killCount; } else { // only display tracked creatures creatureKills = hunt.GetCreatureKills(displayedCreatures); // new Dictionary<Creature, int>(); } // now handle item drops, gather a count for every item Dictionary<Item, int> itemCounts = new Dictionary<Item, int>(); foreach (KeyValuePair<Creature, Dictionary<Item, int>> kvp in hunt.IterateLoot()) { if (lootCreature != null && kvp.Key != lootCreature) continue; // if lootCreature is specified, only consider loot from the specified creature if (displayedCreatures != null && !displayedCreatures.Contains(kvp.Key)) continue; foreach (KeyValuePair<Item, int> kvp2 in kvp.Value) { Item item = kvp2.Key; int value = kvp2.Value; if (!itemCounts.ContainsKey(item)) itemCounts.Add(item, value); else itemCounts[item] += value; } } // now we do item conversion long extraGold = 0; foreach (KeyValuePair<Item, int> kvp in itemCounts) { Item item = kvp.Key; int count = kvp.Value; // discard items that are set to be discarded (as long as all/raw mode is not enabled) if (item.discard && !all) continue; // convert items to gold (as long as raw mode is not enabled), always gather up all the gold coins found if ((!raw && item.convert_to_gold) || item.displayname == "gold coin" || item.displayname == "platinum coin" || item.displayname == "crystal coin") { extraGold += item.GetMaxValue() * count; } else { itemDrops.Add(new Tuple<Item, int>(item, count)); } } // handle coin drops, we always convert the gold to the highest possible denomination (so if gold = 10K, we display a crystal coin) long currentGold = extraGold; if (currentGold > 10000) { itemDrops.Add(new Tuple<Item, int>(StorageManager.getItem("crystal coin"), (int)(currentGold / 10000))); currentGold = currentGold % 10000; } if (currentGold > 100) { itemDrops.Add(new Tuple<Item, int>(StorageManager.getItem("platinum coin"), (int)(currentGold / 100))); currentGold = currentGold % 100; } if (currentGold > 0) { 
itemDrops.Add(new Tuple<Item, int>(StorageManager.getItem("gold coin"), (int)(currentGold))); } // now order by value so most valuable items are placed first // we use a special value for the gold coins so the gold is placed together in the order crystal > platinum > gold // gold coins = <gold total> - 2, platinum coins = <gold total> - 1, crystal coins = <gold total> itemDrops = itemDrops.OrderByDescending(o => o.Item1.displayname == "gold coin" ? extraGold - 2 : (o.Item1.displayname == "platinum coin" ? extraGold - 1 : (o.Item1.displayname == "crystal coin" ? extraGold : o.Item1.GetMaxValue() * o.Item2))).ToList(); return new Tuple<Dictionary<Creature, int>, List<Tuple<Item, int>>>(creatureKills, itemDrops); } private void UpdateLootInternal() { refreshTimer(); var tpl = LootDropForm.GenerateLootInformation(hunt, rawName, lootCreature); creatures = tpl.Item1; items = tpl.Item2; this.SuspendForm(); RefreshLoot(); this.ResumeForm(); } public void UpdateLoot() { if (this.IsDisposed) return; lock (updateLock) { if (!updateTimer.Enabled) { updateTimer.Start(); } } } public static string TimeToString(long totalSeconds) { string displayString = ""; if (totalSeconds >= 3600) { displayString += (totalSeconds / 3600).ToString() + "h "; totalSeconds = totalSeconds % 3600; } if (totalSeconds >= 60) { displayString += (totalSeconds / 60).ToString() + "m "; totalSeconds = totalSeconds % 60; } displayString += totalSeconds.ToString() + "s"; return displayString; } public static long GetAverageGold(Dictionary<Creature, int> creatures) { long averageGold = 0; foreach (KeyValuePair<Creature, int> tpl in creatures) { double average = 0; foreach (ItemDrop dr in tpl.Key.itemdrops) { Item it = StorageManager.getItem(dr.itemid); if (!it.discard && it.GetMaxValue() > 0 && dr.percentage > 0) { average += ((dr.min + dr.max) / 2.0) * (dr.percentage / 100.0) * it.GetMaxValue(); } } averageGold += (int)(average * tpl.Value); } return averageGold; } public List<Control> createdControls = new List<Control>(); public void RefreshLoot() { foreach (Control c in createdControls) { this.Controls.Remove(c); c.Dispose(); } createdControls.Clear(); if (page < 0) page = 0; int base_x = 20, base_y = 30; int x = 0, y = 0; int item_spacing = 4; Size item_size = new Size(32, 32); int max_x = SettingsManager.getSettingInt("LootFormWidth"); if (max_x < minLootWidth) max_x = minLootWidth; int width_x = max_x + item_spacing * 2; long total_value = 0; int currentPage = 0; bool prevPage = page > 0; bool nextPage = false; averageGold = GetAverageGold(creatures); foreach (Tuple<Item, int> tpl in items) { total_value += tpl.Item1.GetMaxValue() * tpl.Item2; } Dictionary<Item, List<PictureBox>> newItemControls = new Dictionary<Item, List<PictureBox>>(); foreach (Tuple<Item, int> tpl in items) { Item item = tpl.Item1; int count = tpl.Item2; while (count > 0) { if (base_x + x >= (max_x - item_size.Width - item_spacing)) { x = 0; if (y + item_size.Height + item_spacing > pageHeight) { currentPage++; if (currentPage > page) { nextPage = true; break; } else { y = 0; } } else { y = y + item_size.Height + item_spacing; } } int mitems = 1; if (item.stackable || SettingsManager.getSettingBool("StackAllItems")) mitems = Math.Min(count, 100); count -= mitems; if (currentPage == page) { PictureBox picture_box; if (itemControls.ContainsKey(item)) { picture_box = itemControls[item][0]; itemControls[item].RemoveAt(0); if (itemControls[item].Count == 0) { itemControls.Remove(item); } picture_box.Location = new System.Drawing.Point(base_x + x, base_y + y); 
if (picture_box.TabIndex != mitems && (item.stackable || mitems > 1)) { picture_box.Image = LootDropForm.DrawCountOnItem(item, mitems); } picture_box.TabIndex = mitems; long individualValue = item.GetMaxValue(); value_tooltip.SetToolTip(picture_box, System.Threading.Thread.CurrentThread.CurrentCulture.TextInfo.ToTitleCase(item.displayname) + " value: " + (individualValue >= 0 ? (individualValue * mitems).ToString() : "Unknown")); } else { picture_box = new PictureBox(); picture_box.Location = new System.Drawing.Point(base_x + x, base_y + y); picture_box.Name = item.GetName(); picture_box.Size = new System.Drawing.Size(item_size.Width, item_size.Height); picture_box.TabIndex = mitems; picture_box.TabStop = false; if (item.stackable || mitems > 1) { picture_box.Image = LootDropForm.DrawCountOnItem(item, mitems); } else { picture_box.Image = item.GetImage(); } picture_box.SizeMode = PictureBoxSizeMode.StretchImage; picture_box.BackgroundImage = StyleManager.GetImage("item_background.png"); picture_box.Click += openItemBox; long individualValue = item.GetMaxValue(); value_tooltip.SetToolTip(picture_box, System.Threading.Thread.CurrentThread.CurrentCulture.TextInfo.ToTitleCase(item.displayname) + " value: " + (individualValue >= 0 ? (individualValue * mitems).ToString() : "Unknown")); this.Controls.Add(picture_box); } if (!newItemControls.ContainsKey(item)) newItemControls.Add(item, new List<PictureBox>()); newItemControls[item].Add(picture_box); } x += item_size.Width + item_spacing; } if (currentPage > page) { break; } } if (page > currentPage) { page = currentPage; RefreshLoot(); return; } foreach (KeyValuePair<Item, List<PictureBox>> kvp in itemControls) { foreach (PictureBox p in kvp.Value) { this.Controls.Remove(p); p.Dispose(); } } itemControls = newItemControls; y = y + item_size.Height + item_spacing; if (prevPage) { PictureBox prevpage = new PictureBox(); prevpage.Location = new Point(10, base_y + y); prevpage.Size = new Size(97, 23); prevpage.Image = StyleManager.GetImage("prevpage.png"); prevpage.BackColor = Color.Transparent; prevpage.SizeMode = PictureBoxSizeMode.StretchImage; prevpage.Click += Prevpage_Click; this.Controls.Add(prevpage); createdControls.Add(prevpage); } if (nextPage) { PictureBox nextpage = new PictureBox(); nextpage.Location = new Point(width_x - 108, base_y + y); nextpage.Size = new Size(98, 23); nextpage.BackColor = Color.Transparent; nextpage.Image = StyleManager.GetImage("nextpage.png"); nextpage.SizeMode = PictureBoxSizeMode.StretchImage; nextpage.Click += Nextpage_Click; this.Controls.Add(nextpage); createdControls.Add(nextpage); } if (prevPage || nextPage) y += 23; x = 0; base_x = 5; Size creature_size = new Size(1, 1); Size labelSize = new Size(1, 1); foreach (KeyValuePair<Creature, int> tpl in creatures) { Creature creature = tpl.Key; creature_size.Width = Math.Max(creature_size.Width, creature.GetImage().Width); creature_size.Height = Math.Max(creature_size.Height, creature.GetImage().Height); } { Dictionary<Creature, Tuple<PictureBox, Label>> newCreatureControls = new Dictionary<Creature, Tuple<PictureBox, Label>>(); int i = 0; foreach (Creature cr in creatures.Keys.OrderByDescending(o => creatures[o] * (1 + o.experience)).ToList<Creature>()) { Creature creature = cr; int killCount = creatures[cr]; if (x >= max_x - creature_size.Width - item_spacing * 2) { x = 0; y = y + creature_size.Height + 23; if (y > maxCreatureHeight) { break; } } int xoffset = (creature_size.Width - creature.GetImage().Width) / 2; int yoffset = (creature_size.Height - 
creature.GetImage().Height) / 2; Label count; PictureBox picture_box; if (creatureControls.ContainsKey(creature)) { picture_box = creatureControls[creature].Item1; count = creatureControls[creature].Item2; creatureControls.Remove(creature); picture_box.Location = new System.Drawing.Point(base_x + x + xoffset, base_y + y + yoffset + (creature_size.Height - creature.GetImage().Height) / 2); count.Location = new Point(base_x + x + xoffset, base_y + y + creature_size.Height); count.Text = killCount.ToString() + "x"; } else { count = new Label(); count.Text = killCount.ToString() + "x"; count.Font = loot_font; count.Size = new Size(1, 10); count.Location = new Point(base_x + x + xoffset, base_y + y + creature_size.Height); count.AutoSize = true; count.TextAlign = ContentAlignment.MiddleCenter; count.ForeColor = StyleManager.NotificationTextColor; count.BackColor = Color.Transparent; picture_box = new PictureBox(); picture_box.Location = new System.Drawing.Point(base_x + x + xoffset, base_y + y + yoffset + (creature_size.Height - creature.GetImage().Height) / 2); picture_box.Name = creature.GetName(); picture_box.Size = new System.Drawing.Size(creature.GetImage().Width, creature.GetImage().Height); picture_box.TabIndex = 1; picture_box.TabStop = false; picture_box.Image = creature.GetImage(); picture_box.SizeMode = PictureBoxSizeMode.StretchImage; picture_box.Click += openCreatureDrops; picture_box.BackColor = Color.Transparent; this.Controls.Add(picture_box); this.Controls.Add(count); } int measured_size = (int)count.CreateGraphics().MeasureString(count.Text, count.Font).Width; int width = Math.Max(measured_size, creature.GetImage().Width); if (width > creature.GetImage().Width) { picture_box.Location = new Point(picture_box.Location.X + (width - creature.GetImage().Width) / 2, picture_box.Location.Y); } else { count.Location = new Point(count.Location.X + (width - measured_size) / 2, count.Location.Y); } newCreatureControls.Add(creature, new Tuple<PictureBox, Label>(picture_box, count)); labelSize = count.Size; i++; x += width + xoffset; } y = y + creature_size.Height + labelSize.Height * 2; foreach (KeyValuePair<Creature, Tuple<PictureBox, Label>> kvp in creatureControls) { this.Controls.Remove(kvp.Value.Item1); this.Controls.Remove(kvp.Value.Item2); kvp.Value.Item1.Dispose(); kvp.Value.Item2.Dispose(); } creatureControls = newCreatureControls; } long usedItemValue = 0; foreach (var tpl in HuntManager.GetUsedItems(hunt)) { usedItemValue += tpl.Item1.GetMaxValue() * tpl.Item2; } int xPosition = width_x - totalValueValue.Size.Width - 5; y = base_y + y + item_spacing + 10; huntNameLabel.Text = hunt.name.ToString(); totalValueLabel.Location = new Point(5, y); totalValueValue.Location = new Point(xPosition, y); totalValueValue.Text = total_value.ToString("N0"); value_tooltip.SetToolTip(totalValueValue, String.Format("Average gold for these creature kills: {0} gold.", averageGold.ToString("N0"))); totalExpLabel.Location = new Point(5, y += 20); totalExpValue.Location = new Point(xPosition, y); totalExpValue.Text = hunt.totalExp.ToString("N0"); expHourValue.Text = ScanningManager.lastResults == null ? 
"-" : ScanningManager.lastResults.expPerHour.ToString("N0"); expHourLabel.Location = new Point(5, y += 20); expHourValue.Location = new Point(xPosition, y); totalTimeLabel.Location = new Point(5, y += 20); totalTimeValue.Location = new Point(xPosition, y); usedItemsValue.Text = usedItemValue.ToString("N0"); usedItemsLabel.Location = new Point(5, y += 20); usedItemsValue.Location = new Point(xPosition, y); long profit = total_value - usedItemValue; value_tooltip.SetToolTip(usedItemsValue, String.Format(profit > 0 ? "Total Profit: {0} gold" : "Total Waste: {0} gold", profit.ToString("N0"))); totalTimeValue.Text = TimeToString((long)hunt.totalTime); y += 20; int widthSize = width_x / 3 - 5; lootButton.Size = new Size(widthSize, lootButton.Size.Height); lootButton.Location = new Point(5, y); allLootButton.Size = new Size(widthSize, lootButton.Size.Height); allLootButton.Location = new Point(7 + widthSize, y); rawLootButton.Size = new Size(widthSize, lootButton.Size.Height); rawLootButton.Location = new Point(10 + 2 * widthSize, y); y += allLootButton.Size.Height + 2; huntNameLabel.Size = new Size(width_x, huntNameLabel.Size.Height); this.Size = new Size(width_x, y + 5); lootLarger.Location = new Point(Size.Width - lootLarger.Size.Width - 4, 4); lootSmaller.Location = new Point(Size.Width - 2 * lootLarger.Size.Width - 4, 4); } public override void LoadForm() { this.NotificationInitialize(); UnregisterControl(lootSmaller); UnregisterControl(lootLarger); UnregisterControl(rawLootButton); UnregisterControl(allLootButton); UnregisterControl(lootButton); UpdateLootInternal(); base.NotificationFinalize(); } private void Prevpage_Click(object sender, EventArgs e) { page--; this.SuspendForm(); this.RefreshLoot(); this.ResumeForm(); this.Refresh(); this.refreshTimer(); } private void Nextpage_Click(object sender, EventArgs e) { page++; this.SuspendForm(); this.RefreshLoot(); this.ResumeForm(); this.Refresh(); this.refreshTimer(); } void openItemBox(object sender, EventArgs e) { this.ReturnFocusToTibia(); CommandManager.ExecuteCommand("item" + Constants.CommandSymbol + (sender as Control).Name); } void openCreatureDrops(object sender, EventArgs e) { if (creatures.Keys.Count == 1) { CommandManager.ExecuteCommand("creature" + Constants.CommandSymbol + (sender as Control).Name); } else { CommandManager.ExecuteCommand(String.Format("loot{0}{1}{0}{2}{0}{3}", Constants.CommandSymbol, huntName, (sender as Control).Name, rawName)); } } private void huntNameLabel_Click(object sender, EventArgs e) { } private void rawLootButton_Click(object sender, EventArgs e) { rawName = "raw"; this.UpdateLootInternal(); this.UpdateCommand(); } private void allLootButton_Click(object sender, EventArgs e) { rawName = "all"; this.UpdateLootInternal(); this.UpdateCommand(); } private void lootButton_Click(object sender, EventArgs e) { rawName = ""; creatureName = ""; lootCreature = null; this.UpdateLootInternal(); this.UpdateCommand(); } private void UpdateCommand() { this.command.command = String.Format("loot{0}{1}{0}{2}{0}{3}", Constants.CommandSymbol, huntName, lootCreature == null ? 
"" : lootCreature.GetName(), rawName); } private void changeSize(int modification) { int max_x = SettingsManager.getSettingInt("LootFormWidth"); if (max_x < minLootWidth) max_x = minLootWidth; max_x += modification; if (max_x < minLootWidth) max_x = minLootWidth; SettingsManager.setSetting("LootFormWidth", (max_x).ToString()); this.SuspendForm(); this.RefreshLoot(); this.ResumeForm(); this.Refresh(); this.refreshTimer(); } private void lootSmaller_Click(object sender, EventArgs e) { changeSize(-36); } private void lootLarger_Click(object sender, EventArgs e) { changeSize(36); } public override string FormName() { return "LootDropForm"; } } }
using System; using System.Collections.Generic; using System.Text; using com.calitha.goldparser; using Epi.Core.EnterInterpreter; namespace Epi.Core.EnterInterpreter.Rules { /* <FuncName1> ::= ABS |COS |DAY|DAYS |ENVIRON|EXISTS|EXP |FILEDATE|FINDTEXT|FORMAT |HOUR|HOURS |LN|LOG |MINUTES|Month|MONTHS |NUMTODATE|NUMTOTIME |RECORDCOUNT|RND|ROUND |SECOND|SECONDS|STEP|SUBSTRING|SIN |TRUNC|TXTTODATE|TXTTONUM|TAN |UPPERCASE |YEAR|YEARS <FuncName2> ::= SYSTEMTIME|SYSTEMDATE <FunctionCall> ::= <FuncName1> '(' <FunctionParameterList> ')' | <FuncName1> '(' <FunctionCall> ')' | <FuncName2> <FunctionParameterList> ::= <EmptyFunctionParameterList> | <NonEmptyFunctionParameterList> <NonEmptyFunctionParameterList> ::= <MultipleFunctionParameterList> | <SingleFunctionParameterList> <MultipleFunctionParameterList> ::= <NonEmptyFunctionParameterList> ',' <Expression> <SingleFunctionParameterList> ::= <Expression> <EmptyFunctionParameterList> ::= */ /// <summary> /// Class for executing FunctionCall reductions. /// </summary> public partial class Rule_FunctionCall : EnterRule { private string functionName = null; private EnterRule functionCall = null; private string ClassName = null; private string MethodName = null; private List<EnterRule> ParameterList = new List<EnterRule>(); #region Constructors /// <summary> /// Constructor for Rule_FunctionCall /// </summary> /// <param name="pToken">The token to build the reduction with.</param> public Rule_FunctionCall(Rule_Context pContext, NonterminalToken pToken) : base(pContext) { /* <FunctionCall> ::= <FuncName1> '(' <FunctionParameterList> ')' | <FuncName1> '(' <FunctionCall> ')' | <FuncName2> */ NonterminalToken T; if (pToken.Tokens.Length == 1) { if (pToken.Rule.ToString().Equals("<FunctionCall>")) { T = (NonterminalToken)pToken.Tokens[0]; } else { T = pToken; } } else { T = (NonterminalToken)pToken.Tokens[2]; } string temp = null; string[] temp2 = null; if (pToken.Tokens[0] is NonterminalToken) { temp = this.ExtractTokens(((NonterminalToken)pToken.Tokens[0]).Tokens).Replace(" . ", "."); temp2 = temp.Split('.'); } else { temp = ((TerminalToken)pToken.Tokens[0]).Text.Replace(" . 
", "."); } if(temp2 != null && temp2.Length > 1) { this.ClassName = temp2[0].Trim(); this.MethodName = temp2[1].Trim(); this.ParameterList = EnterRule.GetFunctionParameters(pContext, (NonterminalToken)pToken.Tokens[2]); } else { functionName = this.GetCommandElement(pToken.Tokens, 0).ToString(); switch (functionName.ToUpperInvariant()) { case "ABS": functionCall = new Rule_Abs(pContext, T); break; case "COS": functionCall = new Rule_Cos(pContext, T); break; case "CURRENTUSER": functionCall = new Rule_CurrentUser(pContext, T); break; case "DAY": functionCall = new Rule_Day(pContext, T); break; case "DAYS": functionCall = new Rule_Days(pContext, T); break; case "FORMAT": functionCall = new Rule_Format(pContext, T); break; case "HOUR": functionCall = new Rule_Hour(pContext, T); break; case "HOURS": functionCall = new Rule_Hours(pContext, T); break; case "ISUNIQUE": functionCall = new Rule_IsUnique(pContext, T); break; case "LINEBREAK": functionCall = new Rule_LineBreak(pContext, T); break; case "MINUTE": functionCall = new Rule_Minute(pContext, T); break; case "MINUTES": functionCall = new Rule_Minutes(pContext, T); break; case "MONTH": functionCall = new Rule_Month(pContext, T); break; case "MONTHS": functionCall = new Rule_Months(pContext, T); break; case "NUMTODATE": functionCall = new Rule_NumToDate(pContext, T); break; case "NUMTOTIME": functionCall = new Rule_NumToTime(pContext, T); break; case "RECORDCOUNT": functionCall = new Rule_RecordCount(pContext, T); break; case "SECOND": functionCall = new Rule_Second(pContext, T); break; case "SECONDS": functionCall = new Rule_Seconds(pContext, T); break; case "SQRT": functionCall = new Rule_SQRT_Func(pContext, T); break; case "SYSBARCODE": functionCall = new Rule_SystemBarcode(pContext, T); break; case "SYSLATITUDE": functionCall = new Rule_SystemLatitude(pContext, T); break; case "SYSLONGITUDE": functionCall = new Rule_SystemLongitude(pContext, T); break; case "SYSALTITUDE": functionCall = new Rule_SystemAltitude(pContext, T); break; case "SYSTEMDATE": functionCall = new Rule_SystemDate(pContext, T); break; case "SYSTEMTIME": functionCall = new Rule_SystemTime(pContext, T); break; case "TXTTODATE": functionCall = new Rule_TxtToDate(pContext, T); break; case "TXTTONUM": functionCall = new Rule_TxtToNum(pContext, T); break; case "YEAR": functionCall = new Rule_Year(pContext, T); break; case "YEARS": functionCall = new Rule_Years(pContext, T); break; case "SUBSTRING": functionCall = new Rule_Substring(pContext, T); break; case "RND": functionCall = new Rule_Rnd(pContext, T); break; case "EXP": functionCall = new Rule_Exp_Func(pContext, T); break; case "LN": functionCall = new Rule_LN_Func(pContext, T); break; case "ROUND": functionCall = new Rule_Round(pContext, T); break; case "LOG": functionCall = new Rule_LOG_Func(pContext, T); break; case "SIN": functionCall = new Rule_Sin(pContext, T); break; case "TAN": functionCall = new Rule_Tan(pContext, T); break; case "TRUNC": functionCall = new Rule_TRUNC(pContext, T); break; case "STEP": functionCall = new Rule_Step(pContext, T); break; case "UPPERCASE": functionCall = new Rule_UpperCase(pContext, T); break; case "FINDTEXT": functionCall = new Rule_FindText(pContext, T); break; case "ENVIRON": functionCall = new Rule_FindText(pContext, T); break; case "EXISTS": functionCall = new Rule_Exists(pContext, T); break; case "FILEDATE": functionCall = new Rule_FileDate(pContext, T); break; case "ZSCORE": functionCall = new Rule_ZSCORE(pContext, T); break; case "PFROMZ": functionCall = new 
Rule_PFROMZ(pContext, T); break; case "EPIWEEK": functionCall = new Rule_EPIWEEK(pContext, T); break; case "STRLEN": functionCall = new Rule_STRLEN(pContext, T); break; case "GETCOORDINATES": functionCall = new Rule_GetCoordinates(pContext, T); break; case "SENDSMS": functionCall = new Rule_SendSMS(pContext, T); break; default: throw new Exception("Function name " + functionName.ToUpperInvariant() + " is not a recognized function."); } } } #endregion #region Public Methods /// <summary> /// Executes the reduction. /// </summary> /// <returns>Returns the result of executing the reduction.</returns> public override object Execute() { object result = null; if (string.IsNullOrEmpty(this.functionName)) { if (this.Context.DLLClassList.ContainsKey(this.ClassName.ToLowerInvariant())) { object[] args = this.ParameterList.ToArray(); if (this.ParameterList.Count > 0) { args = new object[this.ParameterList.Count]; for (int i = 0; i < this.ParameterList.Count; i++) { args[i] = this.ParameterList[i].Execute(); } } else { args = new object[0]; } result = this.Context.DLLClassList[this.ClassName].Execute(this.MethodName, args); } } else { if (this.functionCall != null) { result = this.functionCall.Execute(); } } return result; } #endregion } /// <summary> /// Class for the FunctionParameterList reduction /// </summary> public partial class Rule_FunctionParameterList : EnterRule { public Stack<EnterRule> paramList = null; #region Constructors /// <summary> /// Constructor for Rule_FunctionParameterList /// </summary> /// <param name="pToken">The token to build the reduction with.</param> public Rule_FunctionParameterList(Rule_Context pContext, NonterminalToken pToken) : base(pContext) { //<FunctionParameterList> ::= <EmptyFunctionParameterList> //<FunctionParameterList> ::= <NonEmptyFunctionParameterList> NonterminalToken T = (NonterminalToken)pToken.Tokens[0]; switch (T.Rule.Lhs.ToString()) { case "<NonEmptyFunctionParameterList>": this.paramList = new Stack<EnterRule>(); //this.paramList.Push(new Rule_NonEmptyFunctionParameterList(T, this.paramList)); new Rule_NonEmptyFunctionParameterList(pContext, T, this.paramList); break; case "<SingleFunctionParameterList>": this.paramList = new Stack<EnterRule>(); new Rule_SingleFunctionParameterList(pContext, T, this.paramList); break; case "<EmptyFunctionParameterList>": //this.paramList = new Rule_EmptyFunctionParameterList(T); // do nothing the parameterlist is empty break; case "<MultipleFunctionParameterList>": this.paramList = new Stack<EnterRule>(); //this.MultipleParameterList = new Rule_MultipleFunctionParameterList(pToken); new Rule_MultipleFunctionParameterList(pContext, T, this.paramList); break; } } #endregion #region Public Methods /// <summary> /// rule to build zero or more funtion parameters builds parameters and allows the associated function to call the parameters when needed /// </summary> /// <returns>object</returns> public override object Execute() { object result = null; return result; } #endregion } /// <summary> /// Class for the Rule_EmptyFunctionParameterList reduction /// </summary> public partial class Rule_EmptyFunctionParameterList : EnterRule { #region Constructors public Rule_EmptyFunctionParameterList(Rule_Context pContext, NonterminalToken pToken) : base(pContext) { //<EmptyFunctionParameterList> ::= } #endregion #region Public Methods /// <summary> /// rule to return an empty parameter /// </summary> /// <returns>object</returns> public override object Execute() { return String.Empty; } #endregion } /// <summary> /// Class for 
the Rule_NonEmptyFunctionParameterList reduction. /// </summary> public partial class Rule_NonEmptyFunctionParameterList : EnterRule { protected Stack<EnterRule> MultipleParameterList = null; //private Reduction SingleParameterList = null; #region Constructors public Rule_NonEmptyFunctionParameterList(Rule_Context pContext, NonterminalToken pToken) : base(pContext) { //<NonEmptyFunctionParameterList> ::= <MultipleFunctionParameterList> //<NonEmptyFunctionParameterList> ::= <SingleFunctionParameterList> NonterminalToken T = (NonterminalToken) pToken.Tokens[0]; switch (T.Rule.Lhs.ToString()) { case "<MultipleFunctionParameterList>": this.MultipleParameterList = new Stack<EnterRule>(); //this.MultipleParameterList = new Rule_MultipleFunctionParameterList(pToken); new Rule_MultipleFunctionParameterList(pContext, T, this.MultipleParameterList); break; case "<SingleFunctionParameterList>": //this.SingleParameterList = new Rule_SingleFunctionParameterList(pToken); new Rule_SingleFunctionParameterList(pContext, T, this.MultipleParameterList); break; } } public Rule_NonEmptyFunctionParameterList(Rule_Context pContext, NonterminalToken pToken, Stack<EnterRule> pList) : base(pContext) { //<NonEmptyFunctionParameterList> ::= <MultipleFunctionParameterList> //<NonEmptyFunctionParameterList> ::= <SingleFunctionParameterList> NonterminalToken T = (NonterminalToken) pToken.Tokens[0]; switch (T.Rule.Lhs.ToString()) { case "<MultipleFunctionParameterList>": new Rule_MultipleFunctionParameterList(pContext, T, pList); break; case "<SingleFunctionParameterList>": new Rule_SingleFunctionParameterList(pContext, T, pList); break; default: break; } if (pToken.Tokens.Length > 2) { Rule_Expression Expression = new Rule_Expression(pContext, (NonterminalToken)pToken.Tokens[2]); pList.Push(Expression); } } #endregion #region Public Methods /// <summary> /// builds a multi parameters list which is executed in the calling function's execute method. /// </summary> /// <returns>object</returns> public override object Execute() { return null; } #endregion } /// <summary> /// Class for the Rule_MultipleFunctionParameterList reduction. /// </summary> public partial class Rule_MultipleFunctionParameterList : EnterRule { private EnterRule Expression = null; private EnterRule nonEmptyList = null; #region Constructors public Rule_MultipleFunctionParameterList(Rule_Context pContext, NonterminalToken pToken, Stack<EnterRule> pList) : base(pContext) { //<MultipleFunctionParameterList> ::= <NonEmptyFunctionParameterList> ',' <Expression> NonterminalToken nonEmptyToken = (NonterminalToken)pToken.Tokens[0]; NonterminalToken ExpressionToken = (NonterminalToken)pToken.Tokens[2]; // nonEmptyList = new Rule_NonEmptyFunctionParameterList(pContext, nonEmptyToken, pList); //this.Expression = new Rule_Expression(pContext, ExpressionToken); pList.Push(EnterRule.BuildStatments(pContext, nonEmptyToken)); pList.Push(EnterRule.BuildStatments(pContext, ExpressionToken)); //pList.Push(this.Expression); } #endregion #region Public Methods /// <summary> /// assists in building a multi parameters list which is executed in the calling function's execute method. /// </summary> /// <returns>object</returns> public override object Execute() { object result = null; //nonEmptyList.Execute(); //result = Expression.Execute(); return result; } #endregion } /// <summary> /// Class for the Rule_SingleFunctionParameterList reduction. 
/// </summary> public partial class Rule_SingleFunctionParameterList : EnterRule { private EnterRule Expression = null; #region Constructors public Rule_SingleFunctionParameterList(Rule_Context pContext, NonterminalToken pToken, Stack<EnterRule> pList) : base(pContext) { //<SingleFunctionParameterList> ::= <Expression> this.Expression = new Rule_Expression(pContext, (NonterminalToken)pToken.Tokens[0]); pList.Push(this.Expression); } #endregion #region Public Methods /// <summary> /// executes the parameter expression of a function. /// </summary> /// <returns>object</returns> public override object Execute() { object result = null; result = this.Expression.Execute(); return result; } #endregion } //**** //**** Not implemented yet, but on the list of features //**** //public partial class Rule_Uppercase : Reduction //{ // private Reduction functionCallOrParamList = null; // private string type; // private List<Reduction> reductions = new List<Reduction>(); // private List<object> reducedValues = null; // private string fullString = null; // public Rule_Uppercase(Rule_Context pContext, NonterminalToken pToken) : base(pContext) // { // //UPPERCASE(fullString) // NonterminalToken T = (NonterminalToken)pToken.Tokens[0]; // type = pToken.Rule.Lhs.ToString(); // switch (type) // { // case "<FunctionParameterList>": // this.functionCallOrParamList = new Rule_FunctionParameterList(T); // string tmp = this.GetCommandElement(pToken.Tokens, 0); // reductions.Add(new Rule_Value(tmp)); // break; // case "<FunctionCall>": // this.functionCallOrParamList = new Rule_FunctionCall(T); // break; // default: // break; // } // } // public override object Execute() // { // object result = null; // reducedValues = new List<object>(); // reducedValues.Add(FunctionUtils.StripQuotes(reductions[0].Execute().ToString())); // fullString = (string)reducedValues[0]; // result = fullString.ToUpperInvariant(); // return result; // } //} /// <summary> /// Utility class for helper methods for the Epi Functions. /// </summary> public static class FunctionUtils { public enum DateInterval { Second, Minute, Hour, Day, Month, Year } /// <summary> /// Gets the appropriate date value based on the date and interval. /// </summary> /// <param name="interval">The interval to retrieve from the date.</param> /// <param name="date">The date to get the value from.</param> /// <returns></returns> public static object GetDatePart(DateInterval interval, DateTime date) { object returnValue = null; switch (interval) { case DateInterval.Second: returnValue = date.Second; break; case DateInterval.Minute: returnValue = date.Minute; break; case DateInterval.Hour: returnValue = date.Hour; break; case DateInterval.Day: returnValue = date.Day; break; case DateInterval.Month: returnValue = date.Month; break; case DateInterval.Year: returnValue = date.Year; break; } return returnValue; } /// <summary> /// Gets the difference between two dates based on an interval. 
/// </summary> /// <param name="interval">The interval to use (seconds, minutes, hours, days, months, years)</param> /// <param name="date1">The date to use for comparison.</param> /// <param name="date2">The date to compare against the first date.</param> /// <returns></returns> public static object GetDateDiff(DateInterval interval, DateTime date1, DateTime date2) { object returnValue = null; TimeSpan t; double diff = 0; //returns negative value if date1 is more recent t = date2 - date1; switch (interval) { case DateInterval.Second: diff = t.TotalSeconds; break; case DateInterval.Minute: diff = t.TotalMinutes; break; case DateInterval.Hour: diff = t.TotalHours; break; case DateInterval.Day: diff = t.TotalDays; break; case DateInterval.Month: diff = t.TotalDays / 365.25 * 12.0; break; case DateInterval.Year: diff = t.TotalDays / 365.25; break; } returnValue = Convert.ToInt32(diff); return returnValue; } /// <summary> /// Removes all double quotes from a string. /// </summary> /// <param name="s">The string to remove quotes from.</param> /// <returns>Returns the modified string with no double quotes.</returns> public static string StripQuotes(string s) { return s.Trim(new char[] { '\"' }); } } }
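
// A minimal standalone sketch of the month/year approximation used by FunctionUtils.GetDateDiff
// above: the difference is taken as a TimeSpan, months are approximated as
// TotalDays / 365.25 * 12 and years as TotalDays / 365.25, then rounded with Convert.ToInt32.
// The class name below is hypothetical.
using System;

public static class DateDiffSketch
{
    public static void Main()
    {
        DateTime date1 = new DateTime(2013, 3, 1);
        DateTime date2 = new DateTime(2016, 3, 1);

        // GetDateDiff subtracts date1 from date2, so the result is negative when date1 is more recent.
        TimeSpan t = date2 - date1;

        int months = Convert.ToInt32(t.TotalDays / 365.25 * 12.0); // 1096 days -> 36 months
        int years = Convert.ToInt32(t.TotalDays / 365.25);         // 1096 days -> 3 years
        Console.WriteLine("{0} days -> {1} months, {2} years", t.TotalDays, months, years);
    }
}
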
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Linq; using System.Net.Http; using System.Net.Http.Formatting; using System.Net.Http.Headers; using System.Web.Http.Description; using System.Xml.Linq; using Newtonsoft.Json; namespace Aglaia.API.Areas.HelpPage { /// <summary> /// This class will generate the samples for the help page. /// </summary> public class HelpPageSampleGenerator { /// <summary> /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class. /// </summary> public HelpPageSampleGenerator() { ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>(); ActionSamples = new Dictionary<HelpPageSampleKey, object>(); SampleObjects = new Dictionary<Type, object>(); SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>> { DefaultSampleObjectFactory, }; } /// <summary> /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>. /// </summary> public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; } /// <summary> /// Gets the objects that are used directly as samples for certain actions. /// </summary> public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; } /// <summary> /// Gets the objects that are serialized as samples by the supported formatters. /// </summary> public IDictionary<Type, object> SampleObjects { get; internal set; } /// <summary> /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order, /// stopping when the factory successfully returns a non-<see langref="null"/> object. /// </summary> /// <remarks> /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks> [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Justification = "This is an appropriate nesting of generic types")] public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; } /// <summary> /// Gets the request body samples for a given <see cref="ApiDescription"/>. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The samples keyed by media type.</returns> public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api) { return GetSample(api, SampleDirection.Request); } /// <summary> /// Gets the response body samples for a given <see cref="ApiDescription"/>. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The samples keyed by media type.</returns> public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api) { return GetSample(api, SampleDirection.Response); } /// <summary> /// Gets the request or response body samples. 
/// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param> /// <returns>The samples keyed by media type.</returns> public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection) { if (api == null) { throw new ArgumentNullException("api"); } string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName; string actionName = api.ActionDescriptor.ActionName; IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name); Collection<MediaTypeFormatter> formatters; Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters); var samples = new Dictionary<MediaTypeHeaderValue, object>(); // Use the samples provided directly for actions var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection); foreach (var actionSample in actionSamples) { samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value)); } // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage. // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters. if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type)) { object sampleObject = GetSampleObject(type); foreach (var formatter in formatters) { foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes) { if (!samples.ContainsKey(mediaType)) { object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection); // If no sample found, try generate sample using formatter and sample object if (sample == null && sampleObject != null) { sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType); } samples.Add(mediaType, WrapSampleIfString(sample)); } } } } return samples; } /// <summary> /// Search for samples that are provided directly through <see cref="ActionSamples"/>. /// </summary> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> /// <param name="type">The CLR type.</param> /// <param name="formatter">The formatter.</param> /// <param name="mediaType">The media type.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param> /// <returns>The sample that matches the parameters.</returns> public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection) { object sample; // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames. // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames. // If still not found, try to get the sample provided for the specified mediaType and type. // Finally, try to get the sample provided for the specified mediaType. 
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample)) { return sample; } return null; } /// <summary> /// Gets the sample object that will be serialized by the formatters. /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other /// factories in <see cref="SampleObjectFactories"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>The sample object.</returns> [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")] public virtual object GetSampleObject(Type type) { object sampleObject; if (!SampleObjects.TryGetValue(type, out sampleObject)) { // No specific object available, try our factories. foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories) { if (factory == null) { continue; } try { sampleObject = factory(this, type); if (sampleObject != null) { break; } } catch { // Ignore any problems encountered in the factory; go on to the next one (if any). } } } return sampleObject; } /// <summary> /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The type.</returns> public virtual Type ResolveHttpRequestMessageType(ApiDescription api) { string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName; string actionName = api.ActionDescriptor.ActionName; IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name); Collection<MediaTypeFormatter> formatters; return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters); } /// <summary> /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used. 
/// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param> /// <param name="formatters">The formatters.</param> [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")] public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters) { if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection)) { throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection)); } if (api == null) { throw new ArgumentNullException("api"); } Type type; if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) || ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type)) { // Re-compute the supported formatters based on type Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>(); foreach (var formatter in api.ActionDescriptor.Configuration.Formatters) { if (IsFormatSupported(sampleDirection, formatter, type)) { newFormatters.Add(formatter); } } formatters = newFormatters; } else { switch (sampleDirection) { case SampleDirection.Request: ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody); type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType; formatters = api.SupportedRequestBodyFormatters; break; case SampleDirection.Response: default: type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType; formatters = api.SupportedResponseFormatters; break; } } return type; } /// <summary> /// Writes the sample object using formatter. 
/// </summary> /// <param name="formatter">The formatter.</param> /// <param name="value">The value.</param> /// <param name="type">The type.</param> /// <param name="mediaType">Type of the media.</param> /// <returns></returns> [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")] public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType) { if (formatter == null) { throw new ArgumentNullException("formatter"); } if (mediaType == null) { throw new ArgumentNullException("mediaType"); } object sample = String.Empty; MemoryStream ms = null; HttpContent content = null; try { if (formatter.CanWriteType(type)) { ms = new MemoryStream(); content = new ObjectContent(type, value, formatter, mediaType); formatter.WriteToStreamAsync(type, value, ms, content, null).Wait(); ms.Position = 0; StreamReader reader = new StreamReader(ms); string serializedSampleString = reader.ReadToEnd(); if (mediaType.MediaType.ToUpperInvariant().Contains("XML")) { serializedSampleString = TryFormatXml(serializedSampleString); } else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON")) { serializedSampleString = TryFormatJson(serializedSampleString); } sample = new TextSample(serializedSampleString); } else { sample = new InvalidSample(String.Format( CultureInfo.CurrentCulture, "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.", mediaType, formatter.GetType().Name, type.Name)); } } catch (Exception e) { sample = new InvalidSample(String.Format( CultureInfo.CurrentCulture, "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. 
Exception message: {2}", formatter.GetType().Name, mediaType.MediaType, UnwrapException(e).Message)); } finally { if (ms != null) { ms.Dispose(); } if (content != null) { content.Dispose(); } } return sample; } internal static Exception UnwrapException(Exception exception) { AggregateException aggregateException = exception as AggregateException; if (aggregateException != null) { return aggregateException.Flatten().InnerException; } return exception; } // Default factory for sample objects private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type) { // Try to create a default sample object ObjectGenerator objectGenerator = new ObjectGenerator(); return objectGenerator.GenerateObject(type); } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")] private static string TryFormatJson(string str) { try { object parsedJson = JsonConvert.DeserializeObject(str); return JsonConvert.SerializeObject(parsedJson, Formatting.Indented); } catch { // can't parse JSON, return the original string return str; } } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")] private static string TryFormatXml(string str) { try { XDocument xml = XDocument.Parse(str); return xml.ToString(); } catch { // can't parse XML, return the original string return str; } } private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type) { switch (sampleDirection) { case SampleDirection.Request: return formatter.CanReadType(type); case SampleDirection.Response: return formatter.CanWriteType(type); } return false; } private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection) { HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase); foreach (var sample in ActionSamples) { HelpPageSampleKey sampleKey = sample.Key; if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) && String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) && (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) && sampleDirection == sampleKey.SampleDirection) { yield return sample; } } } private static object WrapSampleIfString(object sample) { string stringSample = sample as string; if (stringSample != null) { return new TextSample(stringSample); } return sample; } } }
using UnityEngine; using System.Collections; using System.Collections.Generic; using System.IO; using System; using System.Text; using System.Runtime.InteropServices; using System.Linq; public class DictationRecognizer : MonoBehaviour { public delegate void OnStartRecordingDelegate(); public event OnStartRecordingDelegate OnStartRecording = delegate { }; public delegate void OnHypothesisDelegate(string hypothesis); public event OnHypothesisDelegate OnHypothesis = delegate { }; public delegate void OnStopRecordingDelegate(string result, bool success); public event OnStopRecordingDelegate OnStopRecording = delegate { }; #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN public UnityEngine.Windows.Speech.DictationRecognizer m_dictationRecognizer; #endif bool m_isRecording = false; string m_errorMessage = "This system is not configured properly to use Speech Recognition"; public bool IsRecording { get { return m_isRecording; } } public bool PhraseRecognitionSystemIsSupported { get { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN return UnityEngine.Windows.Speech.PhraseRecognitionSystem.isSupported; #endif } return false; } } public string PhraseRecognitionSystemStatus { get { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN return UnityEngine.Windows.Speech.PhraseRecognitionSystem.Status.ToString(); #endif } return "Not Supported."; } } public string DictationRecognizerStatus { get { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN return m_dictationRecognizer.Status.ToString(); #endif } return "Not Supported."; } } public float AutoSilenceTimeoutSeconds { get { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN return m_dictationRecognizer.AutoSilenceTimeoutSeconds; #endif } return 0; } set { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN m_dictationRecognizer.AutoSilenceTimeoutSeconds = value; #endif } } } public float InitialSilenceTimeoutSeconds { get { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN return m_dictationRecognizer.InitialSilenceTimeoutSeconds; #endif } return 0; } set { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN m_dictationRecognizer.InitialSilenceTimeoutSeconds = value; #endif } } } void Start() { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN m_dictationRecognizer = new UnityEngine.Windows.Speech.DictationRecognizer(); m_dictationRecognizer.DictationResult += (text, confidence) => { Debug.LogFormat("Dictation result: {0}", text); OnStopRecording(text, true); StopRecording(); }; m_dictationRecognizer.DictationHypothesis += (text) => { Debug.LogFormat("Dictation hypothesis: {0}", text); OnHypothesis(text); }; m_dictationRecognizer.DictationComplete += (completionCause) => { if (completionCause != UnityEngine.Windows.Speech.DictationCompletionCause.Complete) { Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause); OnStopRecording(completionCause.ToString(), false); } }; m_dictationRecognizer.DictationError += (error, hresult) => { string errorString = string.Format("Dictation error: {0}; HResult = {1}.", error, hresult); Debug.LogErrorFormat(errorString); OnStopRecording(errorString, false); }; m_dictationRecognizer.InitialSilenceTimeoutSeconds = 999; #endif } } void Update() { } public void StartRecording() { if (VHUtils.IsWindows10OrGreater()) { if (!m_isRecording) { 
m_isRecording = true; #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN m_dictationRecognizer.Start(); #endif OnStartRecording(); } } else { OnStopRecording(m_errorMessage, false); } } public void StopRecording() { if (VHUtils.IsWindows10OrGreater()) { if (m_isRecording) { m_isRecording = false; #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN m_dictationRecognizer.Stop(); #endif } } } #if false void OnGUIASR() { if (VHUtils.IsWindows10OrGreater()) { #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN GUILayout.Label(string.Format("PhraseRecogntionSystem.isSupported: {0}", UnityEngine.Windows.Speech.PhraseRecognitionSystem.isSupported)); GUILayout.Label(string.Format("PhraseRecogntionSystem.status: {0}", UnityEngine.Windows.Speech.PhraseRecognitionSystem.Status)); if (GUILayout.Button("PhraseRecogntionSystem.Restart()")) { UnityEngine.Windows.Speech.PhraseRecognitionSystem.Restart(); } if (GUILayout.Button("PhraseRecogntionSystem.Stop()")) { UnityEngine.Windows.Speech.PhraseRecognitionSystem.Shutdown(); } GUILayout.Label(string.Format("Dictation.status: {0}", m_dictationRecognizer.Status)); GUILayout.Label(string.Format("Dictation.AutoSilenceTimeoutSeconds: {0}", m_dictationRecognizer.AutoSilenceTimeoutSeconds)); GUILayout.Label(string.Format("Dictation.InitialSilenceTimeoutSeconds: {0}", m_dictationRecognizer.InitialSilenceTimeoutSeconds)); if (GUILayout.Button("Dictation Recognizer Setup")) { m_dictationRecognizer.DictationResult += (text, confidence) => { Debug.LogFormat("Dictation result: {0}", text); }; m_dictationRecognizer.DictationHypothesis += (text) => { Debug.LogFormat("Dictation hypothesis: {0}", text); }; m_dictationRecognizer.DictationComplete += (completionCause) => { if (completionCause != UnityEngine.Windows.Speech.DictationCompletionCause.Complete) Debug.LogErrorFormat("Dictation completed unsuccessfully: {0}.", completionCause); }; m_dictationRecognizer.DictationError += (error, hresult) => { Debug.LogErrorFormat("Dictation error: {0}; HResult = {1}.", error, hresult); }; m_dictationRecognizer.Start(); } if (GUILayout.Button("Dictation Recognizer Stop")) { m_dictationRecognizer.Stop(); } if (GUILayout.Button("Dictation Recognizer Dispose")) { m_dictationRecognizer.Dispose(); } GUILayout.Space(10); #endif } } #endif }
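// A minimal consumer sketch for the DictationRecognizer component above. Only its
// public events, methods and IsRecording property are used; the GetComponent lookup
// and the space-bar toggle are illustrative assumptions, not part of the component.
using UnityEngine;

public class DictationRecognizerUsageSketch : MonoBehaviour
{
    DictationRecognizer m_recognizer;

    void Start()
    {
        m_recognizer = GetComponent<DictationRecognizer>();
        m_recognizer.OnStartRecording += () => Debug.Log("Recording started");
        m_recognizer.OnHypothesis += text => Debug.Log("Hypothesis: " + text);
        m_recognizer.OnStopRecording += (result, success) =>
            Debug.LogFormat("Recording stopped (success={0}): {1}", success, result);
    }

    void Update()
    {
        // Toggle recording with the space bar.
        if (Input.GetKeyDown(KeyCode.Space))
        {
            if (m_recognizer.IsRecording)
                m_recognizer.StopRecording();
            else
                m_recognizer.StartRecording();
        }
    }
}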
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Linq; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc.ModelBinding; using Microsoft.AspNetCore.Mvc.ModelBinding.Validation; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Localization; using Moq; using Xunit; namespace Microsoft.AspNetCore.Mvc.DataAnnotations { public class DataAnnotationsModelValidatorTest { private static readonly ModelMetadataProvider _metadataProvider = TestModelMetadataProvider.CreateDefaultProvider(); [Fact] public void Constructor_SetsAttribute() { // Arrange var attribute = new RequiredAttribute(); // Act var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute, stringLocalizer: null); // Assert Assert.Same(attribute, validator.Attribute); } public static TheoryData<ModelMetadata, object, object, string> Validate_SetsMemberName_AsExpectedData { get { var array = new[] { new SampleModel { Name = "one" }, new SampleModel { Name = "two" } }; var method = typeof(ModelValidationResultComparer).GetMethod( nameof(ModelValidationResultComparer.GetHashCode), new[] { typeof(ModelValidationResult) }); var parameter = method.GetParameters()[0]; // GetHashCode(ModelValidationResult obj) // metadata, container, model, expected MemberName return new TheoryData<ModelMetadata, object, object, string> { { _metadataProvider.GetMetadataForProperty(typeof(string), nameof(string.Length)), "Hello", "Hello".Length, nameof(string.Length) }, { // Validating a top-level property. _metadataProvider.GetMetadataForProperty(typeof(SampleModel), nameof(SampleModel.Name)), null, "Fred", nameof(SampleModel.Name) }, { // Validating a parameter. _metadataProvider.GetMetadataForParameter(parameter), null, new ModelValidationResult(memberName: string.Empty, message: string.Empty), "obj" }, { // Validating a top-level parameter as if using old-fashioned metadata provider. _metadataProvider.GetMetadataForType(typeof(SampleModel)), null, 15, null }, { // Validating an element in a collection. 
_metadataProvider.GetMetadataForType(typeof(SampleModel)), array, array[1], null }, }; } } [Theory] [MemberData(nameof(Validate_SetsMemberName_AsExpectedData))] public void Validate_SetsMemberName_AsExpected( ModelMetadata metadata, object container, object model, string expectedMemberName) { // Arrange var attribute = new Mock<TestableValidationAttribute> { CallBase = true }; attribute .Setup(p => p.IsValidPublic(It.IsAny<object>(), It.IsAny<ValidationContext>())) .Callback((object o, ValidationContext context) => { Assert.Equal(expectedMemberName, context.MemberName); }) .Returns(ValidationResult.Success) .Verifiable(); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute.Object, stringLocalizer: null); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: container, model: model); // Act var results = validator.Validate(validationContext); // Assert Assert.Empty(results); attribute.VerifyAll(); } [Fact] public void Validate_Valid() { // Arrange var metadata = _metadataProvider.GetMetadataForType(typeof(string)); var container = "Hello"; var model = container.Length; var attribute = new Mock<ValidationAttribute> { CallBase = true }; attribute.Setup(a => a.IsValid(model)).Returns(true); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute.Object, stringLocalizer: null); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: container, model: model); // Act var result = validator.Validate(validationContext); // Assert Assert.Empty(result); } [Fact] public void Validate_Invalid() { // Arrange var metadata = _metadataProvider.GetMetadataForProperty(typeof(string), "Length"); var container = "Hello"; var model = container.Length; var attribute = new Mock<ValidationAttribute> { CallBase = true }; attribute.Setup(a => a.IsValid(model)).Returns(false); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute.Object, stringLocalizer: null); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: container, model: model); // Act var result = validator.Validate(validationContext); // Assert var validationResult = result.Single(); Assert.Empty(validationResult.MemberName); Assert.Equal(attribute.Object.FormatErrorMessage("Length"), validationResult.Message); } [Fact] public void Validate_ValidationResultSuccess() { // Arrange var metadata = _metadataProvider.GetMetadataForType(typeof(string)); var container = "Hello"; var model = container.Length; var attribute = new Mock<TestableValidationAttribute> { CallBase = true }; attribute .Setup(p => p.IsValidPublic(It.IsAny<object>(), It.IsAny<ValidationContext>())) .Returns(ValidationResult.Success); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute.Object, stringLocalizer: null); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: container, model: model); // Act var result = validator.Validate(validationContext); // Assert Assert.Empty(result); } [Fact] public void Validate_RequiredButNullAtTopLevel_Invalid() { // Arrange var metadata = 
_metadataProvider.GetMetadataForProperty(typeof(string), "Length"); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), new RequiredAttribute(), stringLocalizer: null); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: null, model: null); // Act var result = validator.Validate(validationContext); // Assert var validationResult = result.Single(); Assert.Empty(validationResult.MemberName); Assert.Equal(new RequiredAttribute().FormatErrorMessage("Length"), validationResult.Message); } [Fact] public void Validate_RequiredAndNotNullAtTopLevel_Valid() { // Arrange var metadata = _metadataProvider.GetMetadataForProperty(typeof(string), "Length"); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), new RequiredAttribute(), stringLocalizer: null); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: null, model: 123); // Act var result = validator.Validate(validationContext); // Assert Assert.Empty(result); } public static TheoryData<string, IEnumerable<string>, IEnumerable<ModelValidationResult>> Validate_ReturnsExpectedResults_Data { get { var errorMessage = "Some error message"; return new TheoryData<string, IEnumerable<string>, IEnumerable<ModelValidationResult>> { { errorMessage, null, new[] { new ModelValidationResult(memberName: string.Empty, message: errorMessage) } }, { errorMessage, Enumerable.Empty<string>(), new[] { new ModelValidationResult(memberName: string.Empty, message: errorMessage) } }, { errorMessage, new[] { (string)null }, new[] { new ModelValidationResult(memberName: string.Empty, message: errorMessage) } }, { errorMessage, new[] { string.Empty }, new[] { new ModelValidationResult(memberName: string.Empty, message: errorMessage) } }, { errorMessage, // Name matches ValidationContext.MemberName. 
new[] { nameof(string.Length) }, new[] { new ModelValidationResult(memberName: string.Empty, message: errorMessage) } }, { errorMessage, new[] { "AnotherName" }, new[] { new ModelValidationResult(memberName: "AnotherName", message: errorMessage) } }, { errorMessage, new[] { "[1]" }, new[] { new ModelValidationResult(memberName: "[1]", message: errorMessage) } }, { errorMessage, new[] { "Name1", "Name2" }, new[] { new ModelValidationResult(memberName: "Name1", message: errorMessage), new ModelValidationResult(memberName: "Name2", message: errorMessage), } }, { errorMessage, new[] { "[0]", "[2]" }, new[] { new ModelValidationResult(memberName: "[0]", message: errorMessage), new ModelValidationResult(memberName: "[2]", message: errorMessage), } }, }; } } [Theory] [MemberData(nameof(Validate_ReturnsExpectedResults_Data))] public void Validate_ReturnsExpectedResults( string errorMessage, IEnumerable<string> memberNames, IEnumerable<ModelValidationResult> expectedResults) { // Arrange var metadata = _metadataProvider.GetMetadataForProperty(typeof(string), nameof(string.Length)); var container = "Hello"; var model = container.Length; var attribute = new Mock<TestableValidationAttribute> { CallBase = true }; attribute .Setup(p => p.IsValidPublic(It.IsAny<object>(), It.IsAny<ValidationContext>())) .Returns(new ValidationResult(errorMessage, memberNames)); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute.Object, stringLocalizer: null); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: container, model: model); // Act var results = validator.Validate(validationContext); // Assert Assert.Equal(expectedResults, results, ModelValidationResultComparer.Instance); } [Fact] public void Validate_IsValidFalse_StringLocalizerReturnsLocalizerErrorMessage() { // Arrange var metadata = _metadataProvider.GetMetadataForType(typeof(string)); var container = "Hello"; var attribute = new MaxLengthAttribute(4); attribute.ErrorMessage = "{0} should have no more than {1} characters."; var localizedString = new LocalizedString(attribute.ErrorMessage, "Longueur est invalide : 4"); var stringLocalizer = new Mock<IStringLocalizer>(); stringLocalizer.Setup(s => s[attribute.ErrorMessage, It.IsAny<object[]>()]).Returns(localizedString); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute, stringLocalizer.Object); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: container, model: "abcde"); // Act var result = validator.Validate(validationContext); // Assert var validationResult = result.Single(); Assert.Empty(validationResult.MemberName); Assert.Equal("Longueur est invalide : 4", validationResult.Message); } [Fact] public void Validate_CanUseRequestServices_WithinValidationAttribute() { // Arrange var service = new Mock<IExampleService>(); service.Setup(x => x.DoSomething()).Verifiable(); var provider = new ServiceCollection().AddSingleton(service.Object).BuildServiceProvider(); var httpContext = new Mock<HttpContext>(); httpContext.SetupGet(x => x.RequestServices).Returns(provider); var attribute = new Mock<TestableValidationAttribute> { CallBase = true }; attribute .Setup(p => p.IsValidPublic(It.IsAny<object>(), It.IsAny<ValidationContext>())) .Callback((object o, ValidationContext context) => { var 
receivedService = context.GetService<IExampleService>(); Assert.Equal(service.Object, receivedService); receivedService.DoSomething(); }); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute.Object, stringLocalizer: null); var validationContext = new ModelValidationContext( actionContext: new ActionContext { HttpContext = httpContext.Object }, modelMetadata: _metadataProvider.GetMetadataForType(typeof(object)), metadataProvider: _metadataProvider, container: null, model: new object()); // Act var results = validator.Validate(validationContext); // Assert service.Verify(); } private const string LocalizationKey = "LocalizeIt"; public static TheoryData Validate_AttributesIncludeValues { get { var pattern = "apattern"; var length = 5; var regex = "^((?!" + pattern + ").)*$"; return new TheoryData<ValidationAttribute, string, object[]> { { new RegularExpressionAttribute(regex) { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), regex } }, { new MaxLengthAttribute(length) { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), length }}, { new MaxLengthAttribute(length) { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), length } }, { new CompareAttribute(pattern) { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), pattern }}, { new MinLengthAttribute(length) { ErrorMessage = LocalizationKey }, "a", new object[] { nameof(SampleModel), length } }, { new CreditCardAttribute() { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), "CreditCard" } }, { new StringLengthAttribute(length) { ErrorMessage = LocalizationKey, MinimumLength = 1}, string.Empty, new object[] { nameof(SampleModel), length, 1 } }, { new RangeAttribute(0, length) { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), 0, length} }, { new EmailAddressAttribute() { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), "EmailAddress" } }, { new PhoneAttribute() { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), "PhoneNumber" } }, { new UrlAttribute() { ErrorMessage = LocalizationKey }, pattern, new object[] { nameof(SampleModel), "Url" } } }; } } [Theory] [MemberData(nameof(Validate_AttributesIncludeValues))] public void Validate_IsValidFalse_StringLocalizerGetsArguments( ValidationAttribute attribute, string model, object[] values) { // Arrange var stringLocalizer = new Mock<IStringLocalizer>(); var validator = new DataAnnotationsModelValidator( new ValidationAttributeAdapterProvider(), attribute, stringLocalizer.Object); var metadata = _metadataProvider.GetMetadataForType(typeof(SampleModel)); var validationContext = new ModelValidationContext( actionContext: new ActionContext(), modelMetadata: metadata, metadataProvider: _metadataProvider, container: null, model: model); // Act validator.Validate(validationContext); // Assert var json = Newtonsoft.Json.JsonConvert.SerializeObject(values) + " " + attribute.GetType().Name; stringLocalizer.Verify(l => l[LocalizationKey, values], json); } public abstract class TestableValidationAttribute : ValidationAttribute { protected override ValidationResult IsValid(object value, ValidationContext validationContext) { return IsValidPublic(value, validationContext); } public abstract ValidationResult IsValidPublic(object value, ValidationContext validationContext); } private class SampleModel { public string Name { get; set; } } 
public interface IExampleService { void DoSomething(); } } }
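// A hedged, Moq-free companion sketch to the tests above: it runs the same
// DataAnnotationsModelValidator.Validate path with a concrete ValidationAttribute
// instead of a mock. The class and attribute names here are illustrative additions,
// and the sketch relies on the same TestModelMetadataProvider helper the suite uses.
using System.ComponentModel.DataAnnotations;
using System.Linq;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Microsoft.AspNetCore.Mvc.ModelBinding.Validation;
using Xunit;

namespace Microsoft.AspNetCore.Mvc.DataAnnotations
{
    public class DataAnnotationsModelValidatorUsageSketch
    {
        private static readonly ModelMetadataProvider _metadataProvider =
            TestModelMetadataProvider.CreateDefaultProvider();

        // Concrete attribute standing in for the mocked TestableValidationAttribute.
        private class NonEmptyAttribute : ValidationAttribute
        {
            protected override ValidationResult IsValid(object value, ValidationContext validationContext)
            {
                return string.IsNullOrEmpty(value as string)
                    ? new ValidationResult("Value must not be empty.")
                    : ValidationResult.Success;
            }
        }

        [Fact]
        public void Validate_ConcreteAttribute_ReportsFailure()
        {
            // Arrange
            var metadata = _metadataProvider.GetMetadataForType(typeof(string));
            var validator = new DataAnnotationsModelValidator(
                new ValidationAttributeAdapterProvider(),
                new NonEmptyAttribute(),
                stringLocalizer: null);
            var validationContext = new ModelValidationContext(
                actionContext: new ActionContext(),
                modelMetadata: metadata,
                metadataProvider: _metadataProvider,
                container: null,
                model: string.Empty);

            // Act
            var results = validator.Validate(validationContext).ToList();

            // Assert: a failed ValidationResult surfaces as one ModelValidationResult with its message.
            Assert.Single(results);
            Assert.Equal("Value must not be empty.", results[0].Message);
        }
    }
}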
/* New BSD License ------------------------------------------------------------------------------- Copyright (c) 2006-2012, EntitySpaces, LLC All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the EntitySpaces, LLC nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL EntitySpaces, LLC BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ------------------------------------------------------------------------------- */ using System; using System.Collections; using System.Collections.Generic; using System.Data; using Tiraggo.DynamicQuery; using Tiraggo.Interfaces; using Npgsql; namespace Tiraggo.Npgsql2Provider { class QueryBuilder { public static NpgsqlCommand PrepareCommand(tgDataRequest request) { StandardProviderParameters std = new StandardProviderParameters(); std.cmd = new NpgsqlCommand(); std.pindex = NextParamIndex(std.cmd); std.request = request; string sql = BuildQuery(std, request.DynamicQuery); std.cmd.CommandText = sql; return (NpgsqlCommand)std.cmd; } protected static string BuildQuery(StandardProviderParameters std, tgDynamicQuerySerializable query) { bool paging = false; if (query.tg.PageNumber.HasValue && query.tg.PageSize.HasValue) paging = true; IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal; string select = GetSelectStatement(std, query); string from = GetFromStatement(std, query); string join = GetJoinStatement(std, query); string where = GetComparisonStatement(std, query, iQuery.InternalWhereItems, " WHERE "); string groupBy = GetGroupByStatement(std, query); string having = GetComparisonStatement(std, query, iQuery.InternalHavingItems, " HAVING "); string orderBy = GetOrderByStatement(std, query); string setOperation = GetSetOperationStatement(std, query); string sql = String.Empty; sql += "SELECT " + select + " FROM " + from + join + where + setOperation + groupBy + having + orderBy; if (paging) { int begRow = ((query.tg.PageNumber.Value - 1) * query.tg.PageSize.Value); sql += " LIMIT " + query.tg.PageSize.ToString(); sql += " OFFSET " + begRow.ToString() + " "; } else if (query.tg.Top >= 0) { sql += " LIMIT " + query.tg.Top.ToString() + " "; } else if (iQuery.Skip.HasValue || iQuery.Take.HasValue) { if (iQuery.Take.HasValue) { sql += " LIMIT " + iQuery.Take.ToString() + " "; } if (iQuery.Skip.HasValue) { sql += " OFFSET " + 
iQuery.Skip.ToString() + " "; } } return sql; } protected static string GetFromStatement(StandardProviderParameters std, tgDynamicQuerySerializable query) { IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal; string sql = String.Empty; if (iQuery.InternalFromQuery == null) { sql = Shared.CreateFullName(std.request, query); if (iQuery.JoinAlias != " ") { sql += " " + iQuery.JoinAlias; } } else { IDynamicQuerySerializableInternal iSubQuery = iQuery.InternalFromQuery as IDynamicQuerySerializableInternal; iSubQuery.IsInSubQuery = true; sql += "("; sql += BuildQuery(std, iQuery.InternalFromQuery); sql += ")"; if (iSubQuery.SubQueryAlias != " ") { sql += " AS " + Delimiters.ColumnOpen + iSubQuery.SubQueryAlias + Delimiters.ColumnClose; } iSubQuery.IsInSubQuery = false; } return sql; } protected static string GetSelectStatement(StandardProviderParameters std, tgDynamicQuerySerializable query) { string sql = String.Empty; string comma = String.Empty; bool selectAll = true; IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal; if (query.tg.Distinct) sql += " DISTINCT "; if (iQuery.InternalSelectColumns != null) { selectAll = false; foreach (tgExpression expressionItem in iQuery.InternalSelectColumns) { if (expressionItem.Query != null) { IDynamicQuerySerializableInternal iSubQuery = expressionItem.Query as IDynamicQuerySerializableInternal; sql += comma; if (iSubQuery.SubQueryAlias == string.Empty) { sql += iSubQuery.JoinAlias + ".*"; } else { iSubQuery.IsInSubQuery = true; sql += " (" + BuildQuery(std, expressionItem.Query as tgDynamicQuerySerializable) + ") AS " + Delimiters.ColumnOpen + iSubQuery.SubQueryAlias + Delimiters.ColumnClose; iSubQuery.IsInSubQuery = false; } comma = ","; } else { sql += comma; string columnName = expressionItem.Column.Name; if (columnName != null && columnName[0] == '<') sql += columnName.Substring(1, columnName.Length - 2); else sql += GetExpressionColumn(std, query, expressionItem, false, true); comma = ","; } } sql += " "; } if (query.tg.CountAll) { selectAll = false; sql += comma; sql += "COUNT(*)"; if (query.tg.CountAllAlias != null) { // Need DBMS string delimiter here sql += " AS " + Delimiters.ColumnOpen + query.tg.CountAllAlias + Delimiters.ColumnClose; } } if (selectAll) { sql += "*"; } return sql; } protected static string GetJoinStatement(StandardProviderParameters std, tgDynamicQuerySerializable query) { string sql = String.Empty; IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal; if (iQuery.InternalJoinItems != null) { foreach (tgJoinItem joinItem in iQuery.InternalJoinItems) { tgJoinItem.tgJoinItemData joinData = (tgJoinItem.tgJoinItemData)joinItem; switch (joinData.JoinType) { case tgJoinType.InnerJoin: sql += " INNER JOIN "; break; case tgJoinType.LeftJoin: sql += " LEFT JOIN "; break; case tgJoinType.RightJoin: sql += " RIGHT JOIN "; break; case tgJoinType.FullJoin: sql += " FULL JOIN "; break; } IDynamicQuerySerializableInternal iSubQuery = joinData.Query as IDynamicQuerySerializableInternal; sql += Shared.CreateFullName(std.request, joinData.Query); sql += " " + iSubQuery.JoinAlias + " ON "; sql += GetComparisonStatement(std, query, joinData.WhereItems, String.Empty); } } return sql; } protected static string GetComparisonStatement(StandardProviderParameters std, tgDynamicQuerySerializable query, List<tgComparison> items, string prefix) { string sql = String.Empty; string comma = String.Empty; IDynamicQuerySerializableInternal iQuery = query as 
IDynamicQuerySerializableInternal; //======================================= // WHERE //======================================= if (items != null) { sql += prefix; string compareTo = String.Empty; foreach (tgComparison comparisonItem in items) { tgComparison.tgComparisonData comparisonData = (tgComparison.tgComparisonData)comparisonItem; tgDynamicQuerySerializable subQuery = null; bool requiresParam = true; bool needsStringParameter = false; if (comparisonData.IsParenthesis) { if (comparisonData.Parenthesis == tgParenthesis.Open) sql += "("; else sql += ")"; continue; } if (comparisonData.IsConjunction) { switch (comparisonData.Conjunction) { case tgConjunction.And: sql += " AND "; break; case tgConjunction.Or: sql += " OR "; break; case tgConjunction.AndNot: sql += " AND NOT "; break; case tgConjunction.OrNot: sql += " OR NOT "; break; } continue; } Dictionary<string, NpgsqlParameter> types = null; if (comparisonData.Column.Query != null) { IDynamicQuerySerializableInternal iLocalQuery = comparisonData.Column.Query as IDynamicQuerySerializableInternal; types = Cache.GetParameters(iLocalQuery.DataID, (tgProviderSpecificMetadata)iLocalQuery.ProviderMetadata, (tgColumnMetadataCollection)iLocalQuery.Columns); } if (comparisonData.IsLiteral) { if (comparisonData.Column.Name[0] == '<') { sql += comparisonData.Column.Name.Substring(1, comparisonData.Column.Name.Length - 2); } else { sql += comparisonData.Column.Name; } continue; } if (comparisonData.ComparisonColumn.Name == null) { subQuery = comparisonData.Value as tgDynamicQuerySerializable; if (subQuery == null) { if (comparisonData.Column.Name != null) { IDynamicQuerySerializableInternal iColQuery = comparisonData.Column.Query as IDynamicQuerySerializableInternal; tgColumnMetadataCollection columns = (tgColumnMetadataCollection)iColQuery.Columns; compareTo = Delimiters.Param + columns[comparisonData.Column.Name].PropertyName + (++std.pindex).ToString(); } else { compareTo = Delimiters.Param + "Expr" + (++std.pindex).ToString(); } } else { // It's a sub query compareTo = GetSubquerySearchCondition(subQuery) + " (" + BuildQuery(std, subQuery) + ") "; requiresParam = false; } } else { compareTo = GetColumnName(comparisonData.ComparisonColumn); requiresParam = false; } switch (comparisonData.Operand) { case tgComparisonOperand.Exists: sql += " EXISTS" + compareTo; break; case tgComparisonOperand.NotExists: sql += " NOT EXISTS" + compareTo; break; //----------------------------------------------------------- // Comparison operators, left side vs right side //----------------------------------------------------------- case tgComparisonOperand.Equal: if (comparisonData.ItemFirst) sql += ApplyWhereSubOperations(std, query, comparisonData) + " = " + compareTo; else sql += compareTo + " = " + ApplyWhereSubOperations(std, query, comparisonData); break; case tgComparisonOperand.NotEqual: if (comparisonData.ItemFirst) sql += ApplyWhereSubOperations(std, query, comparisonData) + " <> " + compareTo; else sql += compareTo + " <> " + ApplyWhereSubOperations(std, query, comparisonData); break; case tgComparisonOperand.GreaterThan: if (comparisonData.ItemFirst) sql += ApplyWhereSubOperations(std, query, comparisonData) + " > " + compareTo; else sql += compareTo + " > " + ApplyWhereSubOperations(std, query, comparisonData); break; case tgComparisonOperand.LessThan: if (comparisonData.ItemFirst) sql += ApplyWhereSubOperations(std, query, comparisonData) + " < " + compareTo; else sql += compareTo + " < " + ApplyWhereSubOperations(std, query, comparisonData); 
break; case tgComparisonOperand.LessThanOrEqual: if (comparisonData.ItemFirst) sql += ApplyWhereSubOperations(std, query, comparisonData) + " <= " + compareTo; else sql += compareTo + " <= " + ApplyWhereSubOperations(std, query, comparisonData); break; case tgComparisonOperand.GreaterThanOrEqual: if (comparisonData.ItemFirst) sql += ApplyWhereSubOperations(std, query, comparisonData) + " >= " + compareTo; else sql += compareTo + " >= " + ApplyWhereSubOperations(std, query, comparisonData); break; case tgComparisonOperand.Like: string esc = comparisonData.LikeEscape.ToString(); if (String.IsNullOrEmpty(esc) || esc == "\0") { sql += ApplyWhereSubOperations(std, query, comparisonData) + " LIKE " + compareTo; needsStringParameter = true; } else { sql += ApplyWhereSubOperations(std, query, comparisonData) + " LIKE " + compareTo; sql += " ESCAPE '" + esc + "'"; needsStringParameter = true; } break; case tgComparisonOperand.NotLike: esc = comparisonData.LikeEscape.ToString(); if (String.IsNullOrEmpty(esc) || esc == "\0") { sql += ApplyWhereSubOperations(std, query, comparisonData) + " NOT LIKE " + compareTo; needsStringParameter = true; } else { sql += ApplyWhereSubOperations(std, query, comparisonData) + " NOT LIKE " + compareTo; sql += " ESCAPE '" + esc + "'"; needsStringParameter = true; } break; case tgComparisonOperand.Contains: sql += " CONTAINS(" + GetColumnName(comparisonData.Column) + ", " + compareTo + ")"; needsStringParameter = true; break; case tgComparisonOperand.IsNull: sql += ApplyWhereSubOperations(std, query, comparisonData) + " IS NULL"; requiresParam = false; break; case tgComparisonOperand.IsNotNull: sql += ApplyWhereSubOperations(std, query, comparisonData) + " IS NOT NULL"; requiresParam = false; break; case tgComparisonOperand.In: case tgComparisonOperand.NotIn: { if (subQuery != null) { // They used a subquery for In or Not sql += ApplyWhereSubOperations(std, query, comparisonData); sql += (comparisonData.Operand == tgComparisonOperand.In) ? 
" IN" : " NOT IN"; sql += compareTo; } else { comma = String.Empty; if (comparisonData.Operand == tgComparisonOperand.In) { sql += ApplyWhereSubOperations(std, query, comparisonData) + " IN ("; } else { sql += ApplyWhereSubOperations(std, query, comparisonData) + " NOT IN ("; } foreach (object oin in comparisonData.Values) { string str = oin as string; if (str != null) { // STRING sql += comma + Delimiters.StringOpen + str + Delimiters.StringClose; comma = ","; } else if (null != oin as System.Collections.IEnumerable) { // LIST OR COLLECTION OF SOME SORT System.Collections.IEnumerable enumer = oin as System.Collections.IEnumerable; if (enumer != null) { System.Collections.IEnumerator iter = enumer.GetEnumerator(); while (iter.MoveNext()) { object o = iter.Current; string soin = o as string; if (soin != null) sql += comma + Delimiters.StringOpen + soin + Delimiters.StringClose; else sql += comma + Convert.ToString(o); comma = ","; } } } else { // NON STRING OR LIST sql += comma + Convert.ToString(oin); comma = ","; } } sql += ")"; requiresParam = false; } } break; case tgComparisonOperand.Between: NpgsqlCommand sqlCommand = std.cmd as NpgsqlCommand; sql += ApplyWhereSubOperations(std, query, comparisonData) + " BETWEEN "; sql += compareTo; if (comparisonData.ComparisonColumn.Name == null) { sqlCommand.Parameters.AddWithValue(compareTo, comparisonData.BetweenBegin); } if (comparisonData.ComparisonColumn2.Name == null) { IDynamicQuerySerializableInternal iColQuery = comparisonData.Column.Query as IDynamicQuerySerializableInternal; tgColumnMetadataCollection columns = (tgColumnMetadataCollection)iColQuery.Columns; compareTo = Delimiters.Param + columns[comparisonData.Column.Name].PropertyName + (++std.pindex).ToString(); sql += " AND " + compareTo; sqlCommand.Parameters.AddWithValue(compareTo, comparisonData.BetweenEnd); } else { sql += " AND " + Delimiters.ColumnOpen + comparisonData.ComparisonColumn2 + Delimiters.ColumnClose; } requiresParam = false; break; } if (requiresParam) { NpgsqlParameter p; if (comparisonData.Column.Name != null) { p = types[comparisonData.Column.Name]; p = Cache.CloneParameter(p); p.ParameterName = compareTo; p.Value = comparisonData.Value; if (needsStringParameter) { p.DbType = DbType.String; } } else { p = new NpgsqlParameter(compareTo, comparisonData.Value); } std.cmd.Parameters.Add(p); } } } return sql; } protected static string GetOrderByStatement(StandardProviderParameters std, tgDynamicQuerySerializable query) { string sql = String.Empty; string comma = String.Empty; IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal; if (iQuery.InternalOrderByItems != null) { sql += " ORDER BY "; foreach (tgOrderByItem orderByItem in iQuery.InternalOrderByItems) { bool literal = false; sql += comma; string columnName = orderByItem.Expression.Column.Name; if (columnName != null && columnName[0] == '<') { sql += columnName.Substring(1, columnName.Length - 2); if (orderByItem.Direction == tgOrderByDirection.Unassigned) { literal = true; // They must provide the DESC/ASC in the literal string } } else { // Is in Set Operation (kind of a tricky workaround) if (iQuery.HasSetOperation) { string joinAlias = iQuery.JoinAlias; iQuery.JoinAlias = " "; sql += GetExpressionColumn(std, query, orderByItem.Expression, false, false); iQuery.JoinAlias = joinAlias; } else { sql += GetExpressionColumn(std, query, orderByItem.Expression, false, false); } } if (!literal) { if (orderByItem.Direction == tgOrderByDirection.Ascending) sql += " ASC"; else sql += " 
DESC"; } comma = ","; } } return sql; } protected static string GetGroupByStatement(StandardProviderParameters std, tgDynamicQuerySerializable query) { string sql = String.Empty; string comma = String.Empty; IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal; if (iQuery.InternalGroupByItems != null) { sql += " GROUP BY "; foreach (tgGroupByItem groupBy in iQuery.InternalGroupByItems) { sql += comma; string columnName = groupBy.Expression.Column.Name; if (columnName != null && columnName[0] == '<') sql += columnName.Substring(1, columnName.Length - 2); else sql += GetExpressionColumn(std, query, groupBy.Expression, false, false); comma = ","; } if (query.tg.WithRollup) { sql += " WITH ROLLUP"; } } return sql; } protected static string GetSetOperationStatement(StandardProviderParameters std, tgDynamicQuerySerializable query) { string sql = String.Empty; IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal; if (iQuery.InternalSetOperations != null) { foreach (tgSetOperation setOperation in iQuery.InternalSetOperations) { switch (setOperation.SetOperationType) { case tgSetOperationType.Union: sql += " UNION "; break; case tgSetOperationType.UnionAll: sql += " UNION ALL "; break; case tgSetOperationType.Intersect: sql += " INTERSECT "; break; case tgSetOperationType.Except: sql += " EXCEPT "; break; } sql += BuildQuery(std, setOperation.Query); } } return sql; } protected static string GetExpressionColumn(StandardProviderParameters std, tgDynamicQuerySerializable query, tgExpression expression, bool inExpression, bool useAlias) { string sql = String.Empty; if (expression.CaseWhen != null) { return GetCaseWhenThenEnd(std, query, expression.CaseWhen); } if (expression.HasMathmaticalExpression) { sql += GetMathmaticalExpressionColumn(std, query, expression.MathmaticalExpression); } else { sql += GetColumnName(expression.Column); } if (expression.SubOperators != null) { if (expression.Column.Distinct) { sql = BuildSubOperationsSql(std, "DISTINCT " + sql, expression.SubOperators); } else { sql = BuildSubOperationsSql(std, sql, expression.SubOperators); } } if (!inExpression && useAlias) { if (expression.SubOperators != null || expression.Column.HasAlias) { sql += " AS " + Delimiters.ColumnOpen + expression.Column.Alias + Delimiters.ColumnClose; } } return sql; } protected static string GetCaseWhenThenEnd(StandardProviderParameters std, tgDynamicQuerySerializable query, tgCase caseWhenThen) { string sql = string.Empty; Tiraggo.DynamicQuery.tgCase.tgSimpleCaseData caseStatement = caseWhenThen; tgColumnItem column = caseStatement.QueryItem; sql += "CASE "; List<tgComparison> list = new List<tgComparison>(); foreach (Tiraggo.DynamicQuery.tgCase.tgSimpleCaseData.tgCaseClause caseClause in caseStatement.Cases) { sql += " WHEN "; if (!caseClause.When.IsExpression) { sql += GetComparisonStatement(std, query, caseClause.When.Comparisons, string.Empty); } else { if (!caseClause.When.Expression.IsLiteralValue) { sql += GetExpressionColumn(std, query, caseClause.When.Expression, false, true); } else { if (caseClause.When.Expression.LiteralValue is string) { sql += Delimiters.StringOpen + caseClause.When.Expression.LiteralValue + Delimiters.StringClose; } else { sql += Convert.ToString(caseClause.When.Expression.LiteralValue); } } } sql += " THEN "; if (!caseClause.Then.IsLiteralValue) { sql += GetExpressionColumn(std, query, caseClause.Then, false, true); } else { if (caseClause.Then.LiteralValue is string) { sql += Delimiters.StringOpen + 
caseClause.Then.LiteralValue + Delimiters.StringClose; } else { sql += Convert.ToString(caseClause.Then.LiteralValue); } } } if (caseStatement.Else != null) { sql += " ELSE "; if (!caseStatement.Else.IsLiteralValue) { sql += GetExpressionColumn(std, query, caseStatement.Else, false, true); } else { if (caseStatement.Else.LiteralValue is string) { sql += Delimiters.StringOpen + caseStatement.Else.LiteralValue + Delimiters.StringClose; } else { sql += Convert.ToString(caseStatement.Else.LiteralValue); } } } sql += " END "; sql += " AS " + Delimiters.ColumnOpen + column.Alias + Delimiters.ColumnOpen; return sql; } protected static string GetMathmaticalExpressionColumn(StandardProviderParameters std, tgDynamicQuerySerializable query, tgMathmaticalExpression mathmaticalExpression) { string sql = "("; if (mathmaticalExpression.ItemFirst) { sql += GetExpressionColumn(std, query, mathmaticalExpression.SelectItem1, true, false); sql += esArithmeticOperatorToString(mathmaticalExpression); if (mathmaticalExpression.SelectItem2 != null) { sql += GetExpressionColumn(std, query, mathmaticalExpression.SelectItem2, true, false); } else { sql += GetMathmaticalExpressionLiteralType(std, mathmaticalExpression); } } else { if (mathmaticalExpression.SelectItem2 != null) { sql += GetExpressionColumn(std, query, mathmaticalExpression.SelectItem2, true, true); } else { sql += GetMathmaticalExpressionLiteralType(std, mathmaticalExpression); } sql += esArithmeticOperatorToString(mathmaticalExpression); sql += GetExpressionColumn(std, query, mathmaticalExpression.SelectItem1, true, false); } sql += ")"; return sql; } protected static string esArithmeticOperatorToString(tgMathmaticalExpression mathmaticalExpression) { switch (mathmaticalExpression.Operator) { case tgArithmeticOperator.Add: // MEG - 4/26/08, I'm not thrilled with this check here, will revist on future release if (mathmaticalExpression.SelectItem1.Column.Datatype == tgSystemType.String || (mathmaticalExpression.SelectItem1.HasMathmaticalExpression && mathmaticalExpression.SelectItem1.MathmaticalExpression.LiteralType == tgSystemType.String) || (mathmaticalExpression.SelectItem1.HasMathmaticalExpression && mathmaticalExpression.SelectItem1.MathmaticalExpression.SelectItem1.Column.Datatype == tgSystemType.String) || (mathmaticalExpression.LiteralType == tgSystemType.String)) return "||"; else return "+"; case tgArithmeticOperator.Subtract: return "-"; case tgArithmeticOperator.Multiply: return "*"; case tgArithmeticOperator.Divide: return "/"; case tgArithmeticOperator.Modulo: return "%"; default: return ""; } } protected static string GetMathmaticalExpressionLiteralType(StandardProviderParameters std, tgMathmaticalExpression mathmaticalExpression) { switch (mathmaticalExpression.LiteralType) { case tgSystemType.String: return Delimiters.StringOpen + (string)mathmaticalExpression.Literal + Delimiters.StringClose; case tgSystemType.DateTime: return Delimiters.StringOpen + ((DateTime)(mathmaticalExpression.Literal)).ToShortDateString() + Delimiters.StringClose; default: return Convert.ToString(mathmaticalExpression.Literal); } } protected static string ApplyWhereSubOperations(StandardProviderParameters std, tgDynamicQuerySerializable query, tgComparison.tgComparisonData comparisonData) { string sql = string.Empty; if (comparisonData.HasExpression) { sql += GetMathmaticalExpressionColumn(std, query, comparisonData.Expression); if (comparisonData.SubOperators != null && comparisonData.SubOperators.Count > 0) { sql = BuildSubOperationsSql(std, sql, 
comparisonData.SubOperators); } return sql; } string delimitedColumnName = GetColumnName(comparisonData.Column); if (comparisonData.SubOperators != null) { sql = BuildSubOperationsSql(std, delimitedColumnName, comparisonData.SubOperators); } else { sql = delimitedColumnName; } return sql; } protected static string BuildSubOperationsSql(StandardProviderParameters std, string columnName, List<tgQuerySubOperator> subOperators) { string sql = string.Empty; subOperators.Reverse(); Stack<object> stack = new Stack<object>(); if (subOperators != null) { foreach (tgQuerySubOperator op in subOperators) { switch (op.SubOperator) { case tgQuerySubOperatorType.ToLower: sql += "LOWER("; stack.Push(")"); break; case tgQuerySubOperatorType.ToUpper: sql += "UPPER("; stack.Push(")"); break; case tgQuerySubOperatorType.LTrim: sql += "LTRIM("; stack.Push(")"); break; case tgQuerySubOperatorType.RTrim: sql += "RTRIM("; stack.Push(")"); break; case tgQuerySubOperatorType.Trim: sql += "LTRIM(RTRIM("; stack.Push("))"); break; case tgQuerySubOperatorType.SubString: sql += "SUBSTRING("; stack.Push(")"); stack.Push(op.Parameters["length"]); stack.Push(","); if (op.Parameters.ContainsKey("start")) { stack.Push(op.Parameters["start"]); stack.Push(","); } else { // They didn't pass in start so we start // at the beginning stack.Push(1); stack.Push(","); } break; case tgQuerySubOperatorType.Coalesce: sql += "COALESCE("; stack.Push(")"); stack.Push(op.Parameters["expressions"]); stack.Push(","); break; case tgQuerySubOperatorType.Date: sql += "DATE_TRUNC('day',"; stack.Push(")"); break; case tgQuerySubOperatorType.Length: sql += "CHAR_LENGTH("; stack.Push(")"); break; case tgQuerySubOperatorType.Round: sql += "ROUND("; stack.Push(")"); stack.Push(op.Parameters["SignificantDigits"]); stack.Push(","); break; case tgQuerySubOperatorType.DatePart: sql += "EXTRACT("; sql += op.Parameters["DatePart"]; sql += " FROM "; stack.Push(")"); break; case tgQuerySubOperatorType.Avg: sql += "AVG("; stack.Push(")"); break; case tgQuerySubOperatorType.Count: sql += "COUNT("; stack.Push(")"); break; case tgQuerySubOperatorType.Max: sql += "MAX("; stack.Push(")"); break; case tgQuerySubOperatorType.Min: sql += "MIN("; stack.Push(")"); break; case tgQuerySubOperatorType.StdDev: sql += "STDDEV("; stack.Push(")"); break; case tgQuerySubOperatorType.Sum: sql += "SUM("; stack.Push(")"); break; case tgQuerySubOperatorType.Var: sql += "VARIANCE("; stack.Push(")"); break; case tgQuerySubOperatorType.Cast: sql += "CAST("; stack.Push(")"); if (op.Parameters.Count > 1) { stack.Push(")"); if (op.Parameters.Count == 2) { stack.Push(op.Parameters["length"].ToString()); } else { stack.Push(op.Parameters["scale"].ToString()); stack.Push(","); stack.Push(op.Parameters["precision"].ToString()); } stack.Push("("); } stack.Push(GetCastSql((tgCastType)op.Parameters["tgCastType"])); stack.Push(" AS "); break; } } sql += columnName; while (stack.Count > 0) { sql += stack.Pop().ToString(); } } return sql; } protected static string GetCastSql(tgCastType castType) { switch (castType) { case tgCastType.Boolean: return "bool"; case tgCastType.Byte: return "tinyint"; case tgCastType.Char: return "char"; case tgCastType.DateTime: return "timestamp"; case tgCastType.Double: return "float8"; case tgCastType.Decimal: return "numeric"; case tgCastType.Guid: return "uuid"; case tgCastType.Int16: return "int2"; case tgCastType.Int32: return "int4"; case tgCastType.Int64: return "int8"; case tgCastType.Single: return "float4"; case tgCastType.String: return "varchar"; default: 
return "error"; } } protected static string GetColumnName(tgColumnItem column) { if (column.Query == null || column.Query.tg.JoinAlias == " ") { return Delimiters.ColumnOpen + column.Name + Delimiters.ColumnClose; } else { IDynamicQuerySerializableInternal iQuery = column.Query as IDynamicQuerySerializableInternal; if (iQuery.IsInSubQuery) { return column.Query.tg.JoinAlias + "." + Delimiters.ColumnOpen + column.Name + Delimiters.ColumnClose; } else { string alias = iQuery.SubQueryAlias == string.Empty ? iQuery.JoinAlias : iQuery.SubQueryAlias; return alias + "." + Delimiters.ColumnOpen + column.Name + Delimiters.ColumnClose; } } } private static int NextParamIndex(IDbCommand cmd) { return cmd.Parameters.Count; } private static string GetSubquerySearchCondition(tgDynamicQuerySerializable query) { string searchCondition = String.Empty; IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal; switch (iQuery.SubquerySearchCondition) { case tgSubquerySearchCondition.All: searchCondition = "ALL"; break; case tgSubquerySearchCondition.Any: searchCondition = "ANY"; break; case tgSubquerySearchCondition.Some: searchCondition = "SOME"; break; } return searchCondition; } } }
#region Licence... /* The MIT License (MIT) Copyright (c) 2014 Oleg Shilo Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #endregion Licence... using System; using System.Collections.Generic; using System.Xml.Linq; namespace WixSharp { /// <summary> /// This class defines website attributes. It is a close equivalent of WebSite WiX element. /// </summary> public partial class WebSite : WixEntity { /// <summary> /// The IIS certificate /// </summary> public IISCertificate Certificate; /// <summary> /// This is the name of the web site that will show up in the IIS management console. /// </summary> public string Description = ""; /// <summary> /// Indicates whether the WebSite is to be installed (created on IIS) or an existing WebSite should be used to install the corresponding /// WebApplication. The default <see cref="InstallWebSite"/> value is <c>false</c>. /// <para>Developers should be aware of the WebSite installation model imposed by WiX/MSI and use <see cref="InstallWebSite"/> carefully.</para> /// <para>If <see cref="InstallWebSite"/> value is set to <c>false</c> the parent WebApplication (<see cref="T:WixSharp.IISVirtualDir"/>) /// will be installed in the brand new (freshly created) WebSite or in the existing one if a site with the same address/port combination already exists /// on IIS. The undesirable side effect of this deployment scenario is that if the existing WebSite was used to install the WebApplication it will be /// deleted on IIS during uninstallation even if this WebSite has other WebApplications installed.</para> /// <para>The "safer" option is to set <see cref="InstallWebSite"/> value to <c>true</c>. In this case the WebApplication will /// be installed in an existing WebSite with matching address/port. If the match is not found the installation will fail. During the uninstallation /// only the installed WebApplication will be removed from IIS.</para> /// </summary> public bool InstallWebSite = false; /// <summary> /// Initializes a new instance of the <see cref="WebSite" /> class. /// </summary> public WebSite() { } /// <summary> /// Initializes a new instance of the <see cref="WebSite"/> class. /// </summary> /// <param name="id">The id.</param> /// <param name="description">The description of the web site (as it shows up in the IIS management console).</param> public WebSite(Id id, string description) { this.Id = id; this.Description = description; } /// <summary> /// Initializes a new instance of the <see cref="WebSite"/> class.
/// </summary> /// <param name="description">The description of the web site (as it shows up in the IIS management console).</param> public WebSite(string description) { this.Name = "WebSite"; //to become a prefix of the auto-generated Id this.Description = description; } /// <summary> /// Initializes a new instance of the <see cref="WebSite"/> class. /// </summary> /// <param name="id">The id.</param> public WebSite(Id id) { this.Id = id; } ///// <summary> ///// Collection of <see cref="T:WebSite.Certificate"/> associated with website. ///// </summary> //public Certificate[] Certificates = new Certificate[0]; /// <summary> /// Initializes a new instance of the <see cref="WebSite"/> class. /// </summary> /// <param name="description">The description of the web site (as it shows up in the IIS management console).</param> /// <param name="addressDefinition">The address definition.</param> public WebSite(string description, string addressDefinition) { this.Name = "WebSite"; //to become a prefix of the auto-generated Id this.Description = description; this.AddressesDefinition = addressDefinition; } /// <summary> /// Initializes a new instance of the <see cref="WebSite"/> class. /// </summary> /// <param name="id">The id.</param> /// <param name="description">The description.</param> /// <param name="addressDefinition">The address definition.</param> public WebSite(Id id, string description, string addressDefinition) { this.Id = id; this.AddressesDefinition = addressDefinition; this.Description = description; } internal void ProcessAddressesDefinition() { if (!AddressesDefinition.IsEmpty()) { List<WebAddress> addressesToAdd = new List<WebAddress>(); foreach (string addressDef in AddressesDefinition.Split(";".ToCharArray(), StringSplitOptions.RemoveEmptyEntries)) { try { string[] tokens = addressDef.Split(":".ToCharArray(), StringSplitOptions.RemoveEmptyEntries); string address = tokens[0]; string port = tokens[1]; if (tokens[1].ContainsWixConstants()) { addressesToAdd.Add(new WebAddress { Address = address, AttributesDefinition = "Port=" + port }); } else { addressesToAdd.Add(new WebAddress { Address = address, Port = Convert.ToInt32(port) }); } } catch (Exception e) { throw new Exception("Invalid AddressesDefinition", e); } } this.addresses = addressesToAdd.ToArray(); } } /// <summary> /// This class defines website address. It is a close equivalent of WebAddress WiX element. /// </summary> public partial class WebAddress : WixEntity { /// <summary> /// The IP address for the web address. To specify the "All Unassigned" IP address, do not specify /// this attribute or specify its value as "*". The IP address is also used to determine if the WebSite is already installed. /// The IP address must match exactly (empty value matches "All Unassigned") unless "*" is used which will match any existing IP (including "All Unassigned"). /// </summary> public string Address = "*"; /// <summary> /// Sets the port number. /// </summary> public int Port = 0; /// <summary> /// Optional attributes of the <c>WebAddress Element</c> (e.g. Secure:YesNoPath). /// </summary> /// <example> /// <code> /// var address = new WebAddress /// { /// Port = 80, /// Attributes = new Dictionary&lt;string, string&gt; { { "Secure", "Yes" } }; /// ... /// </code> /// </example> public new Dictionary<string, string> Attributes { get { return base.Attributes; } set { base.Attributes = value; } } } /// <summary> /// Specification for auto-generating the <see cref="T:WebSite.WebAddresses"/> collection.
/// <para>If <see cref="AddressesDefinition"/> is specified, the existing content of <see cref="Addresses"/> will be ignored /// and replaced with the auto-generated one at compile time.</para> /// </summary> /// <example> /// <c>webSite.AddressesDefinition = "*:80;*:90";</c> will be parsed and converted to an array of <see cref="T:WixSharp.WebSite.WebAddress"/> as follows: /// <code> /// ... /// webSite.Addresses = new [] /// { /// new WebSite.WebAddress /// { /// Address = "*", /// Port = 80 /// }, /// new WebSite.WebAddress /// { /// Address = "*", /// Port = 90 /// } /// } /// </code> /// </example> public string AddressesDefinition = ""; /// <summary> /// Reference to a WebApplication that is to be installed as part of this web site. /// </summary> public string WebApplication = null; /// <summary> /// Collection of <see cref="T:WebSite.WebAddresses"/> associated with the website. /// <para> /// The user-specified values of <see cref="Addresses"/> will be ignored and replaced with the /// auto-generated addresses if <see cref="AddressesDefinition"/> is specified either directly or via the appropriate <see cref="WebSite"/> constructor. /// </para> /// </summary> public WebAddress[] Addresses { get { ProcessAddressesDefinition(); return addresses; } set { addresses = value; } } WebAddress[] addresses = new WebAddress[0]; } /// <summary> /// This class defines WebAppPool WiX element. It is used to specify the application pool for this application in IIS 6 applications. /// </summary> public partial class WebAppPool : WixEntity { /// <summary> /// Initializes a new instance of the <see cref="WebAppPool"/> class. /// </summary> /// <param name="name">The name.</param> /// <param name="attributesDefinition">The attributes definition. This parameter is used to set encapsulated <see cref="T:WixSharp.WixEntity.AttributesDefinition"/>.</param> public WebAppPool(string name, string attributesDefinition) { base.Name = name; base.AttributesDefinition = attributesDefinition; } /// <summary> /// Initializes a new instance of the <see cref="WebAppPool"/> class. /// </summary> /// <param name="name">The name.</param> public WebAppPool(string name) { base.Name = name; } /// <summary> /// Initializes a new instance of the <see cref="WebAppPool"/> class. /// </summary> public WebAppPool() { } } /// <summary> /// This class defines WebDirProperties WiX element. The class itself has no distinctive behaviour or schema; it relies fully on the /// encapsulated <see cref="T:WixSharp.WixEntity.AttributesDefinition"/>. /// </summary> public partial class WebDirProperties : WixEntity { /// <summary> /// Initializes a new instance of the <see cref="WebDirProperties"/> class. /// </summary> /// <param name="attributesDefinition">The attributes definition. This parameter is used to set encapsulated <see cref="T:WixSharp.WixEntity.AttributesDefinition"/>.</param> public WebDirProperties(string attributesDefinition) { base.AttributesDefinition = attributesDefinition; } /// <summary> /// Performs an implicit conversion from <see cref="System.String"/> to <see cref="WebDirProperties"/>. /// </summary> /// <param name="attributesDefinition">The attributes definition.</param> /// <returns> /// The result of the conversion. /// </returns> public static implicit operator WebDirProperties(string attributesDefinition) { return new WebDirProperties(attributesDefinition); } } /// <summary> /// This class defines IIS Virtual Directory. It is a close equivalent of WebVirtualDirectory WiX element.
/// </summary> public class IISVirtualDir : WixEntity { /// <summary> /// Initializes a new instance of the <see cref="IISVirtualDir" /> class. /// </summary> public IISVirtualDir() { base.Name = "VirtDir"; //to become a prefix of the auto-generated Id } /// <summary> /// WebSite to which this virtual directory belongs. /// </summary> public WebSite WebSite = null; #region WebVirtualDir Element Attributes /// <summary> /// Gets or sets the application name, which is the URL relative path used to access this virtual directory. /// <para> /// It is a full equivalent of <see cref="WixSharp.IISVirtualDir.Alias"/>. /// </para> /// </summary> /// <value>The name.</value> public new string Name { get { return Alias; } set { Alias = value; } } /// <summary> /// Sets the application name, which is the URL relative path used to access this virtual directory. If not set, the <see cref="AppName"/> will be used. /// </summary> public string Alias = ""; #endregion WebVirtualDir Element Attributes //IISVirtualDir-to-WebApplication is one-to-one relationship #region WebApplication Element attributes /// <summary> /// Sets the name of this Web application. /// </summary> public string AppName = "MyWebApp"; //WebApplication element attribute /// <summary> /// Sets the Enable Session State option. When enabled, you can set the session timeout using the SessionTimeout attribute. /// </summary> public bool? AllowSessions;// YesNoDefaultType //WebApplication element attribute /// <summary> /// Sets the option that enables response buffering in the application, which allows ASP script to set response headers anywhere in the script. /// </summary> public bool? Buffer;// YesNoDefaultType //WebApplication element attribute /// <summary> /// Enable ASP client-side script debugging. /// </summary> public bool? ClientDebugging;// YesNoDefaultType //WebApplication element attribute /// <summary> /// Sets the default script language for the site. /// </summary> public DefaultScript? DefaultScript; //WebApplication element attribute /// <summary> /// Sets the application isolation level for this application for pre-IIS 6 applications. /// </summary> public Isolation? Isolation; //WebApplication element attribute /// <summary> /// Sets the parent paths option, which allows a client to use relative paths to reach parent directories from this application. /// </summary> public bool? ParentPaths;// YesNoDefaultType //WebApplication element attribute /// <summary> /// Sets the timeout value for executing ASP scripts. /// </summary> public int? ScriptTimeout; //WebApplication element attribute /// <summary> /// Enable ASP server-side script debugging. /// </summary> public bool? ServerDebugging;// YesNoDefaultType //WebApplication element attribute /// <summary> /// Sets the timeout value for sessions in minutes. /// </summary> public int? SessionTimeout; //WebApplication element attribute /// <summary> /// References a WebAppPool instance to use as the application pool for this application in IIS 6 applications. /// </summary> public WebAppPool WebAppPool; //WebApplication element attribute /// <summary> /// WebDirProperties used by one or more WebSites. /// </summary> public WebDirProperties WebDirProperties; #endregion WebApplication Element attributes } }
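// Hypothetical usage sketch (based only on the members defined above; the rest of the WixSharp
// project/setup wiring is omitted and the attribute values are illustrative assumptions):
//
//   var site = new WebSite("My Product Site", "*:80;*:8080"); // parsed into two WebAddress entries
//   var vdir = new IISVirtualDir
//   {
//       Name = "MyApp",            // the alias, i.e. the URL-relative path of the virtual directory
//       AppName = "MyWebApp",
//       WebSite = site,
//       WebAppPool = new WebAppPool("MyAppPool", "Identity=applicationPoolIdentity")
//   };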
using Cake.Common.Tools.GitVersion; using Cake.Core; using Cake.Core.IO; public static class GitAlias { // Mono cake compiler does not like enums in this code. public const int ResetTypeDefault = 0; public const int ResetTypeSoft = 1; public const int ResetTypeHard = 2; public static void DeleteTag(ICakeContext context, string tagName, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("tag"); builder.Append("-d"); builder.AppendQuoted(tagName); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void Tag(ICakeContext context, string tagName, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("tag"); builder.AppendQuoted(tagName); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void CheckoutDetached(ICakeContext context, string branchName, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("checkout"); builder.Append("--detach"); builder.AppendQuoted(branchName); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void Checkout(ICakeContext context, string branchName, bool force = false, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("checkout"); builder.AppendQuoted(branchName); if (force) builder.Append("-f"); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void Branch(ICakeContext context, string branchName, bool force = false, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("checkout"); builder.Append("-b"); builder.AppendQuoted(branchName); if (force) builder.Append("-f"); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void DeleteBranch(ICakeContext context, string branchName, bool force = false, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("branch"); builder.Append("-d"); if (force) builder.Append("-f"); builder.AppendQuoted(branchName); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void Merge(ICakeContext context, string branchName, bool allowFF = false, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("merge"); if (!allowFF) builder.Append("--no-ff"); builder.AppendQuoted(branchName); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void MergeOurs(ICakeContext context, string branchName, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("merge"); builder.Append("-s"); builder.Append("ours"); builder.AppendQuoted(branchName); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } /// A merge that forces the contents of the release-branch into master while preserving master as a series of merges. This never fails. /// This is also a safe operation (in our git-flow context here), as master is the representation of releases and is fed from release-branches. /// Those release branches are simultaniously merged both into master and develop. 
/// /// This merge script is based on the answer in StackOverflow here: /// http://stackoverflow.com/a/27338013 public static void MergeRelease(ICakeContext context, string stageBranchName, GitToolSettings settings = null) { var assertedVersions = context.GitVersion(new GitVersionSettings {OutputType = GitVersionOutput.Json}); var releaseBranch = assertedVersions.BranchName; Checkout(context, releaseBranch, true, settings); // Do a merge commit. The content of this commit does not matter, so use a strategy that never fails. // Note: This advances branchA. MergeOurs(context, stageBranchName, settings); // # Change working tree and index to desired content. // # --detach ensures branchB will not move when doing the reset in the next step. CheckoutDetached(context, stageBranchName, settings); // # Move HEAD to branchA without changing contents of working tree and index. Reset(context, ResetTypeSoft, releaseBranch, settings); // # 'attach' HEAD to branchA. # This ensures branchA will move when doing 'commit --amend'. Checkout(context, releaseBranch, false, settings); // # Change content of merge commit to current index (i.e. content of branchB). CommitAmend(context, settings); } public static void Push(ICakeContext context, string remoteName, string branchName, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("push"); builder.AppendQuoted(remoteName); builder.AppendQuoted(branchName); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void PushTag(ICakeContext context, string remoteName, string tagName, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("push"); builder.Append("--tags"); builder.AppendQuoted(remoteName); builder.AppendQuoted(tagName); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void CommitAmend(ICakeContext context, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("commit"); builder.Append("--amend"); builder.Append("-C"); builder.Append("HEAD"); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void Commit(ICakeContext context, string message, bool all = false, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("commit"); if (all) builder.Append("-a"); builder.Append("-m"); builder.AppendQuoted(message); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void Add(ICakeContext context, string file, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("add"); builder.AppendQuoted(file); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void Log(ICakeContext context, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("log"); var tool = new GitTool(context); tool.RunGit(settings ?? new GitToolSettings(), builder); } public static void Reset(ICakeContext context, int type = ResetTypeDefault, string id = null, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("reset"); if (type == ResetTypeHard) builder.Append("--hard"); else if (type == ResetTypeSoft) builder.Append("--soft"); if (!string.IsNullOrEmpty(id)) builder.AppendQuoted(id); var tool = new GitTool(context); tool.RunGit(settings ?? 
new GitToolSettings(), builder); } public static bool CheckUncommitedChanges(ICakeContext context, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("diff"); builder.Append("--quiet"); builder.Append("--exit-code"); var tool = new GitTool(context); return tool.RunGitCheck(settings ?? new GitToolSettings(), builder) == 0; } public static bool CheckBranchExists(ICakeContext context, string branchName, GitToolSettings settings = null) { var builder = new ProcessArgumentBuilder(); builder.Append("show-ref"); builder.Append("--quiet"); builder.Append("--heads"); builder.AppendQuoted(branchName); var tool = new GitTool(context); return tool.RunGitCheck(settings ?? new GitToolSettings(), builder) == 0; } }
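// Hypothetical usage sketch: when this alias class is loaded into a Cake build script, the
// helpers are typically driven from a task body using the script's ICakeContext (exposed as
// `Context` in Cake scripts); the tag name and remote below are illustrative assumptions:
//
//   Task("Tag-Release").Does(() =>
//   {
//       // CheckUncommitedChanges returns true when `git diff --quiet --exit-code` exits with 0,
//       // i.e. when the working tree is clean.
//       if (GitAlias.CheckUncommitedChanges(Context))
//       {
//           GitAlias.Tag(Context, "v1.2.3");
//           GitAlias.PushTag(Context, "origin", "v1.2.3");
//       }
//   });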
// Camera Path 3 // Available on the Unity Asset Store // Copyright (c) 2013 Jasper Stocker http://support.jasperstocker.com/camera-path/ // For support contact [email protected] // // THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY // KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A // PARTICULAR PURPOSE. using UnityEngine; public class CPMath { //VECTOR 3 Calculations //Calculate the Bezier spline position //t - the time (0-1) of the curve to sample //p - the start point of the curve //a - control point from p //b - control point from q //q - the end point of the curve public static Vector3 CalculateBezier(float t, Vector3 p, Vector3 a, Vector3 b, Vector3 q) { float t2 = t * t; float t3 = t2 * t; float u = 1.0f - t; float u2 = u * u; float u3 = u2 * u; Vector3 output = u3 * p + 3 * u2 * t * a + 3 * u * t2 * b + t3 * q; return output; } /// <summary> /// /// </summary> /// <param name="p"></param> /// <param name="a"></param> /// <param name="b"></param> /// <param name="q"></param> /// <param name="t"></param> /// <param name="tension">1 is high, 0 normal, -1 is low</param> /// <param name="bias">0 is even, positive is towards first segment, negative towards the other</param> /// <returns></returns> public static Vector3 CalculateHermite(Vector3 p, Vector3 a, Vector3 b, Vector3 q, float t, float tension, float bias) { float t2 = t * t; float t3 = t2 * t; Vector3 m0 = (a - p) * (1 + bias) * (1 - tension) / 2; m0 += (b-a)*(1-bias)*(1-tension)/2; Vector3 m1 = (b - a) * (1 + bias) * (1 - tension) / 2; m1 += (q-b)*(1-bias)*(1-tension)/2; float a0 = 2 * t3 - 3 * t2 + 1; float a1 = t3 - 2 * t2 + t; float a2 = t3 - t2; float a3 = -2 * t3 + 3 * t2; return(a0*a+a1*m0+a2*m1+a3*b); } public static Vector3 CalculateCatmullRom(Vector3 p, Vector3 a, Vector3 b, Vector3 q, float t) { var t2 = t * t; var a0 = -0.5f * p + 1.5f * a - 1.5f * b + 0.5f * q; var a1 = p - 2.5f * a + 2f * b - 0.5f * q; var a2 = -0.5f * p + 0.5f * b; var a3 = a; return (a0 * t * t2) + (a1 * t2) + (a2 * t) + a3; } //VECTOR 2 Calculations public static Vector2 CalculateBezier(float t, Vector2 p, Vector2 a, Vector2 b, Vector2 q) { float t2 = t * t; float t3 = t2 * t; float u = 1.0f - t; float u2 = u * u; float u3 = u2 * u; Vector2 output = u3 * p + 3 * u2 * t * a + 3 * u * t2 * b + t3 * q; return output; } /// <summary> /// /// </summary> /// <param name="p"></param> /// <param name="a"></param> /// <param name="b"></param> /// <param name="q"></param> /// <param name="t"></param> /// <param name="tension">1 is high, 0 normal, -1 is low</param> /// <param name="bias">0 is even, positive is towards first segment, negative towards the other</param> /// <returns></returns> public static Vector2 CalculateHermite(Vector2 p, Vector2 a, Vector2 b, Vector2 q, float t, float tension, float bias) { float t2 = t * t; float t3 = t2 * t; Vector2 m0 = (a - p) * (1 + bias) * (1 - tension) / 2; m0 += (b - a) * (1 - bias) * (1 - tension) / 2; Vector2 m1 = (b - a) * (1 + bias) * (1 - tension) / 2; m1 += (q - b) * (1 - bias) * (1 - tension) / 2; float a0 = 2 * t3 - 3 * t2 + 1; float a1 = t3 - 2 * t2 + t; float a2 = t3 - t2; float a3 = -2 * t3 + 3 * t2; return (a0 * a + a1 * m0 + a2 * m1 + a3 * b); } public static Vector2 CalculateCatmullRom(Vector2 p, Vector2 a, Vector2 b, Vector2 q, float t) { var t2 = t * t; var a0 = -0.5f * p + 1.5f * a - 1.5f * b + 0.5f * q; var a1 = p - 2.5f * a + 2f * b - 0.5f * q; var a2 = -0.5f * p + 0.5f * b; var a3 = a; 
return (a0 * t * t2) + (a1 * t2) + (a2 * t) + a3; } //Calculate Cubic Rotation //p - point we start with //q - next point //nextNormIndex - the point immediately before p //prevNormIndex - the point immediately after q //t - time (0-1) of the curve pq to sample public static Quaternion CalculateCubic(Quaternion p, Quaternion a, Quaternion b, Quaternion q, float t) { // Ensure all the quaternions are proper for interpolation - thanks Jeff! if (Quaternion.Dot(p, q) < 0.0f) q = new Quaternion(-q.x, -q.y, -q.z, -q.w); if (Quaternion.Dot(p, a) < 0.0f) a = new Quaternion(-a.x, -a.y, -a.z, -a.w); if (Quaternion.Dot(p, b) < 0.0f) b = new Quaternion(-b.x, -b.y, -b.z, -b.w); Quaternion a1 = SquadTangent(a, p, q); Quaternion b1 = SquadTangent(p, q, b); float slerpT = 2.0f * t * (1.0f - t); Quaternion sl = Slerp(Slerp(p, q, t), Slerp(a1, b1, t), slerpT); return sl; } public static float CalculateCubic(float p, float a, float b, float q, float t) { float t2 = t * t; float t3 = t2 * t; float u = 1.0f - t; float u2 = u * u; float u3 = u2 * u; return (u3 * p + 3 * u2 * t * q + 3 * u * t2 * a + t3 * b); } /// <summary> /// /// </summary> /// <param name="p"></param> /// <param name="a"></param> /// <param name="b"></param> /// <param name="q"></param> /// <param name="t"></param> /// <param name="tension">1 is high, 0 normal, -1 is low</param> /// <param name="bias">0 is even, positive is towards first segment, negative towards the other</param> /// <returns></returns> public static float CalculateHermite(float p, float a, float b, float q, float t, float tension, float bias) { float t2 = t * t; float t3 = t2 * t; float m0 = (a - p) * (1 + bias) * (1 - tension) / 2; m0 += (b - a) * (1 - bias) * (1 - tension) / 2; float m1 = (b - a) * (1 + bias) * (1 - tension) / 2; m1 += (q - b) * (1 - bias) * (1 - tension) / 2; float a0 = 2 * t3 - 3 * t2 + 1; float a1 = t3 - 2 * t2 + t; float a2 = t3 - t2; float a3 = -2 * t3 + 3 * t2; return (a0 * a + a1 * m0 + a2 * m1 + a3 * b); } public static float CalculateCatmullRom(float p, float a, float b, float q, float t) { var t2 = t * t; var a0 = -0.5f * p + 1.5f * a - 1.5f * b + 0.5f * q; var a1 = p - 2.5f * a + 2f * b - 0.5f * q; var a2 = -0.5f * p + 0.5f * b; var a3 = a; return (a0 * t * t2) + (a1 * t2) + (a2 * t) + a3; } public static float SmoothStep(float val) { return val * val * (3.0f - 2.0f * val); } //calculate the Squad tangent for use in Cubic Rotation Interpolation public static Quaternion SquadTangent(Quaternion before, Quaternion center, Quaternion after) { Quaternion l1 = LnDif(center, before); Quaternion l2 = LnDif(center, after); Quaternion e = Quaternion.identity; for (int i = 0; i < 4; ++i) { e[i] = -0.25f * (l1[i] + l2[i]); } return center * (Exp(e)); } public static Quaternion LnDif(Quaternion a, Quaternion b) { Quaternion dif = Quaternion.Inverse(a) * b; Normalize(dif); return Log(dif); } public static Quaternion Normalize(Quaternion q) { float norm = Mathf.Sqrt(q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w); if (norm > 0.0f) { q.x /= norm; q.y /= norm; q.z /= norm; q.w /= norm; } else { q.x = 0.0f; q.y = 0.0f; q.z = 0.0f; q.w = 1.0f; } return q; } public static Quaternion Exp(Quaternion q) { float theta = Mathf.Sqrt(q[0] * q[0] + q[1] * q[1] + q[2] * q[2]); if (theta < 1E-6) { return new Quaternion(q[0], q[1], q[2], Mathf.Cos(theta)); } //else float coef = Mathf.Sin(theta) / theta; return new Quaternion(q[0] * coef, q[1] * coef, q[2] * coef, Mathf.Cos(theta)); } public static Quaternion Log(Quaternion q) { float len = Mathf.Sqrt(q[0] * q[0] + q[1] * 
q[1] + q[2] * q[2]); if (len < 1E-6) { return new Quaternion(q[0], q[1], q[2], 0.0f); } //else float coef = Mathf.Acos(q[3]) / len; return new Quaternion(q[0] * coef, q[1] * coef, q[2] * coef, 0.0f); } //based on [Shoe87] implementation public static Quaternion Slerp(Quaternion p, Quaternion q, float t) { Quaternion ret; float cos = Quaternion.Dot(p, q); float fCoeff0, fCoeff1; if ((1.0f + cos) > 0.00001f) { if ((1.0f - cos) > 0.00001f) { float omega = Mathf.Acos(cos); float somega = Mathf.Sin(omega); float invSin = (Mathf.Sign(somega) * 1.0f) / somega; fCoeff0 = Mathf.Sin((1.0f - t) * omega) * invSin; fCoeff1 = Mathf.Sin(t * omega) * invSin; } else { fCoeff0 = 1.0f - t; fCoeff1 = t; } ret.x = fCoeff0 * p.x + fCoeff1 * q.x; ret.y = fCoeff0 * p.y + fCoeff1 * q.y; ret.z = fCoeff0 * p.z + fCoeff1 * q.z; ret.w = fCoeff0 * p.w + fCoeff1 * q.w; } else { fCoeff0 = Mathf.Sin((1.0f - t) * Mathf.PI * 0.5f); fCoeff1 = Mathf.Sin(t * Mathf.PI * 0.5f); ret.x = fCoeff0 * p.x - fCoeff1 * p.y; ret.y = fCoeff0 * p.y + fCoeff1 * p.x; ret.z = fCoeff0 * p.z - fCoeff1 * p.w; ret.w = p.z; } return ret; } public static Quaternion Nlerp(Quaternion p, Quaternion q, float t) { Quaternion ret; float w1 = 1.0f - t; ret.x = w1 * p.x + t * q.x; ret.y = w1 * p.y + t * q.y; ret.z = w1 * p.z + t * q.z; ret.w = w1 * p.w + t * q.w; Normalize(ret); return ret; } public static Quaternion GetQuatConjugate(Quaternion q) { return new Quaternion(-q.x, -q.y, -q.z, q.w); } public static float SignedAngle(Vector3 from, Vector3 to, Vector3 up) { Vector3 direction = (to - from).normalized; Vector3 cross = Vector3.Cross(up, direction); float dot = Vector3.Dot(from, cross); return Vector3.Angle(from, to) * Mathf.Sign(dot); } public static float ClampAngle(float angle, float min, float max) { if (angle < -360) angle += 360; if (angle > 360) angle -= 360; return Mathf.Clamp(angle, -max, -min); } }
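// Note (not part of the Camera Path source): CalculateBezier above evaluates the standard cubic
// Bezier polynomial B(t) = (1-t)^3*p + 3*(1-t)^2*t*a + 3*(1-t)*t^2*b + t^3*q, so sampling the
// curve at its ends returns the end points exactly:
//
//   Vector3 start = CPMath.CalculateBezier(0f, p, a, b, q);   // == p
//   Vector3 end   = CPMath.CalculateBezier(1f, p, a, b, q);   // == q
//
// a and b are the control points pulled from p and q respectively, as described in the parameter
// comments at the top of the class.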
namespace CSharpMath.Rendering.Text { using System; using System.Collections.Generic; using System.Linq; using Atom; using Display; using Display.Displays; using BackEnd; using FrontEnd; using Display = Display.Displays.ListDisplay<BackEnd.Fonts, BackEnd.Glyph>; using CSharpMath.Structures; using System.Drawing; public static class TextTypesetter { public static (Display relative, Display absolute) Layout(TextAtom input, Fonts inputFont, float canvasWidth) { // TODO: Multiply these constants by resolution const float abovedisplayskip = 12, abovedisplayshortskip = 0, belowdisplayskip = 12, belowdisplayshortskip = 7; if (input == null) return (new Display(Array.Empty<IDisplay<Fonts, Glyph>>()), new Display(Array.Empty<IDisplay<Fonts, Glyph>>())); float accumulatedHeight = 0; //indicator of the need to apply belowdisplay(short)skip when line break bool afterDisplayMaths = false; void BreakLine(TextLayoutLineBuilder line, List<IDisplay<Fonts, Glyph>> displayList, List<IDisplay<Fonts, Glyph>> displayMathList, bool appendLineGap = true) { if (afterDisplayMaths) { accumulatedHeight += line.Width > displayMathList.Last().Position.X ? belowdisplayskip : belowdisplayshortskip; afterDisplayMaths = false; } line.Clear(0, -accumulatedHeight, displayList, ref accumulatedHeight, true, appendLineGap); } //variables captured by this method are currently unchangable by TextAtoms void AddDisplaysWithLineBreaks( TextAtom atom, Fonts fonts, TextLayoutLineBuilder line, List<IDisplay<Fonts, Glyph>> displayList, List<IDisplay<Fonts, Glyph>> displayMathList, FontStyle style, Color? color ) { IDisplay<Fonts, Glyph> display; switch (atom) { case TextAtom.List list: foreach (var a in list.Content) AddDisplaysWithLineBreaks (a, fonts, line, displayList, displayMathList, style, color); break; case TextAtom.Style st: AddDisplaysWithLineBreaks (st.Content, fonts, line, displayList, displayMathList, st.FontStyle, color); break; case TextAtom.Size sz: AddDisplaysWithLineBreaks (sz.Content, new Fonts(fonts, sz.PointSize), line, displayList, displayMathList, style, color); break; case TextAtom.Colored c: AddDisplaysWithLineBreaks (c.Content, fonts, line, displayList, displayMathList, style, c.Colour); break; case TextAtom.Space sp: //Allow space at start of line since user explicitly specified its length //Also \par generates this kind of spaces line.AddSpace(sp.Content.ActualLength(MathTable.Instance, fonts)); break; case TextAtom.Newline n: BreakLine(line, displayList, displayMathList); break; case TextAtom.Math m when m.DisplayStyle: var lastLineWidth = line.Width; BreakLine(line, displayList, displayMathList, false); display = Typesetter.CreateLine(m.Content, fonts, TypesettingContext.Instance, LineStyle.Display); var displayX = IPainterExtensions.GetDisplayPosition (display.Width, display.Ascent, display.Descent, fonts.PointSize, canvasWidth, float.NaN, TextAlignment.Top, default, default, default).X; //\because When displayList.LastOrDefault() is null, //the false condition is selected //\therefore Append abovedisplayshortskip which defaults //to 0 when nothing is above the display-style maths accumulatedHeight += lastLineWidth > displayX ? 
abovedisplayskip : abovedisplayshortskip; accumulatedHeight += display.Ascent; display.Position = new System.Drawing.PointF(displayX, -accumulatedHeight); accumulatedHeight += display.Descent; afterDisplayMaths = true; if (color != null) display.SetTextColorRecursive(color); displayMathList.Add(display); break; void FinalizeInlineDisplay(float ascender, float rawDescender, float lineGap, bool forbidAtLineStart = false) { if (color != null) display.SetTextColorRecursive(color); if (line.Width + display.Width > canvasWidth && !forbidAtLineStart) BreakLine(line, displayList, displayMathList); //rawDescender is taken directly from font file and is negative, //while IDisplay.Descender is positive line.Add(display, ascender, -rawDescender, lineGap); } case TextAtom.Text t: var content = UnicodeFontChanger.ChangeFont(t.Content, style); var glyphs = GlyphFinder.Instance.FindGlyphs(fonts, content); //Calling Select(g => g.Typeface).Distinct() speeds up query up to 10 times, //Calling Max(Func<,>) instead of Select(Func<,>).Max() speeds up query 2 times var typefaces = glyphs.Select(g => g.Typeface).Distinct().ToList(); display = new TextRunDisplay<Fonts, Glyph>( new AttributedGlyphRun<Fonts, Glyph>(content, glyphs, fonts), Range.NotFound, TypesettingContext.Instance ); FinalizeInlineDisplay( typefaces.Max(tf => tf.Ascender * tf.CalculateScaleToPixelFromPointSize(fonts.PointSize)), typefaces.Min(tf => tf.Descender * tf.CalculateScaleToPixelFromPointSize(fonts.PointSize)), typefaces.Max(tf => tf.LineGap * tf.CalculateScaleToPixelFromPointSize(fonts.PointSize)) ); break; case TextAtom.Math m: if (m.DisplayStyle) throw new InvalidCodePathException ("Display style maths should have been handled above this switch."); display = Typesetter.CreateLine(m.Content, fonts, TypesettingContext.Instance, LineStyle.Text); var scale = fonts.MathTypeface.CalculateScaleToPixelFromPointSize(fonts.PointSize); FinalizeInlineDisplay(fonts.MathTypeface.Ascender * scale, fonts.MathTypeface.Descender * scale, fonts.MathTypeface.LineGap * scale); break; case TextAtom.ControlSpace cs: var spaceGlyph = GlyphFinder.Instance.Lookup(fonts, ' '); display = new TextRunDisplay<Fonts, Glyph>( new AttributedGlyphRun<Fonts, Glyph>(" ", new[] { spaceGlyph }, fonts), Range.NotFound, TypesettingContext.Instance ); scale = spaceGlyph.Typeface.CalculateScaleToPixelFromPointSize(fonts.PointSize); FinalizeInlineDisplay(spaceGlyph.Typeface.Ascender * scale, spaceGlyph.Typeface.Descender * scale, spaceGlyph.Typeface.LineGap * scale, forbidAtLineStart: true); //No spaces at start of line break; case TextAtom.Accent a: var accentGlyph = GlyphFinder.Instance.FindGlyphForCharacterAtIndex( fonts, a.AccentChar.Length - 1, a.AccentChar ); scale = accentGlyph.Typeface.CalculateScaleToPixelFromPointSize(fonts.PointSize); var accenteeDisplayList = new List<IDisplay<Fonts, Glyph>>(); var invalidDisplayMaths = new List<IDisplay<Fonts, Glyph>>(); var accentDisplayLine = new TextLayoutLineBuilder(); AddDisplaysWithLineBreaks(a.Content, fonts, accentDisplayLine, accenteeDisplayList, invalidDisplayMaths, style, color); float _ = default; accentDisplayLine.Clear (0, 0, accenteeDisplayList, ref _, false, false); System.Diagnostics.Debug.Assert(invalidDisplayMaths.Count == 0, "Display maths inside an accentee is unsupported -- ignoring display maths"); var accentee = new Display(accenteeDisplayList); var accenteeCodepoint = a.Content.SingleChar(style); var accenteeSingleGlyph = accenteeCodepoint.HasValue ? 
GlyphFinder.Instance.Lookup(fonts, accenteeCodepoint.GetValueOrDefault()) : GlyphFinder.Instance.EmptyGlyph; var accentDisplay = new AccentDisplay<Fonts, Glyph>( Typesetter.CreateAccentGlyphDisplay( accentee, accenteeSingleGlyph, accentGlyph, TypesettingContext.Instance, fonts, Range.NotFound ), accentee); display = accentDisplay; //accentDisplay.Ascent does not take account of accent glyph's extra height //-> accent will be out of bounds if it is on the first line FinalizeInlineDisplay( Math.Max(accentGlyph.Typeface.Ascender * scale, accentDisplay.Accent.Position.Y + accentDisplay.Ascent), accentGlyph.Typeface.Descender * scale, accentGlyph.Typeface.LineGap * scale); break; case TextAtom.Comment _: break; case null: throw new InvalidOperationException ("TextAtoms should never be null. You must have sneaked one in."); case var a: throw new InvalidCodePathException ($"There should not be an unknown type of TextAtom. However, one with type {a.GetType()} was encountered."); } } var relativePositionList = new List<IDisplay<Fonts, Glyph>>(); var absolutePositionList = new List<IDisplay<Fonts, Glyph>>(); var globalLine = new TextLayoutLineBuilder(); AddDisplaysWithLineBreaks( input, inputFont, globalLine, relativePositionList, absolutePositionList, FontStyle.Roman /*FontStyle.Default is FontStyle.Italic, FontStyle.Roman is no change to characters*/, null ); BreakLine(globalLine, relativePositionList, absolutePositionList); //remember to finalize the last line var adjustedCanvasWidth = float.IsInfinity(canvasWidth) || float.IsNaN(canvasWidth) ? Math.Max(relativePositionList.CollectionWidth(), absolutePositionList.IsNonEmpty() ? absolutePositionList.Max(d => d.Width) : 0) : canvasWidth; if (float.IsInfinity(canvasWidth) || float.IsNaN(canvasWidth)) // In this case X of every display in absolutePositionList will be Infinity or NaN // Use max(width of relativePositionList, width of absolutePositionList) as canvasWidth instead foreach (var absDisplay in absolutePositionList) absDisplay.Position = new System.Drawing.PointF( IPainterExtensions.GetDisplayPosition (absDisplay.Width, absDisplay.Ascent, absDisplay.Descent, inputFont.PointSize, adjustedCanvasWidth, float.NaN, TextAlignment.Top, default, default, default).X, absDisplay.Position.Y); return (new Display(relativePositionList), new Display(absolutePositionList)); } } }
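// Note (summary of the layout logic above, not an addition to its behaviour): the full
// abovedisplayskip/belowdisplayskip is applied at a line break only when the adjacent text line
// extends horizontally past the display-style maths (lastLineWidth or line.Width greater than the
// display's X position); otherwise the corresponding "short" skip is used, analogous to TeX's
// \abovedisplayshortskip and \belowdisplayshortskip.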
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using NUnit.Framework; using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer; using DateTools = Lucene.Net.Documents.DateTools; using Document = Lucene.Net.Documents.Document; using Field = Lucene.Net.Documents.Field; using IndexReader = Lucene.Net.Index.IndexReader; using IndexWriter = Lucene.Net.Index.IndexWriter; using Term = Lucene.Net.Index.Term; using Directory = Lucene.Net.Store.Directory; using RAMDirectory = Lucene.Net.Store.RAMDirectory; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; namespace Lucene.Net.Search { /// <summary> Unit test for sorting code. /// /// </summary> [Serializable] [TestFixture] public class TestCustomSearcherSort:LuceneTestCase { private Directory index = null; private Query query = null; // reduced from 20000 to 2000 to speed up test... private const int INDEX_SIZE = 2000; /*public TestCustomSearcherSort(System.String name):base(name) { }*/ /*[STAThread] public static void Main(System.String[] argv) { // TestRunner.run(suite()); // {{Aroush-2.9}} how is this done in NUnit? }*/ /*public static Test suite() { return new TestSuite(typeof(TestCustomSearcherSort)); }*/ // create an index for testing private Directory GetIndex() { RAMDirectory indexStore = new RAMDirectory(); IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); RandomGen random = new RandomGen(this, NewRandom()); for (int i = 0; i < INDEX_SIZE; ++i) { // don't decrease; if to low the problem doesn't show up Document doc = new Document(); if ((i % 5) != 0) { // some documents must not have an entry in the first sort field doc.Add(new Field("publicationDate_", random.GetLuceneDate(), Field.Store.YES, Field.Index.NOT_ANALYZED)); } if ((i % 7) == 0) { // some documents to match the query (see below) doc.Add(new Field("content", "test", Field.Store.YES, Field.Index.ANALYZED)); } // every document has a defined 'mandant' field doc.Add(new Field("mandant", System.Convert.ToString(i % 3), Field.Store.YES, Field.Index.NOT_ANALYZED)); writer.AddDocument(doc); } writer.Optimize(); writer.Close(); return indexStore; } /// <summary> Create index and query for test cases. </summary> [SetUp] public override void SetUp() { base.SetUp(); index = GetIndex(); query = new TermQuery(new Term("content", "test")); } /// <summary> Run the test using two CustomSearcher instances. 
</summary> [Test] public virtual void TestFieldSortCustomSearcher() { // log("Run testFieldSortCustomSearcher"); // define the sort criteria Sort custSort = new Sort(new SortField("publicationDate_", SortField.STRING), SortField.FIELD_SCORE); Searcher searcher = new CustomSearcher(this, index, 2); // search and check hits MatchHits(searcher, custSort); } /// <summary> Run the test using one CustomSearcher wrapped by a MultiSearcher. </summary> [Test] public virtual void TestFieldSortSingleSearcher() { // log("Run testFieldSortSingleSearcher"); // define the sort criteria Sort custSort = new Sort(new SortField("publicationDate_", SortField.STRING), SortField.FIELD_SCORE); Searcher searcher = new MultiSearcher(new Searcher[]{new CustomSearcher(this, index, 2)}); // search and check hits MatchHits(searcher, custSort); } /// <summary> Run the test using two CustomSearcher instances. </summary> [Test] public virtual void TestFieldSortMultiCustomSearcher() { // log("Run testFieldSortMultiCustomSearcher"); // define the sort criteria Sort custSort = new Sort(new SortField("publicationDate_", SortField.STRING), SortField.FIELD_SCORE); Searcher searcher = new MultiSearcher(new Searchable[]{new CustomSearcher(this, index, 0), new CustomSearcher(this, index, 2)}); // search and check hits MatchHits(searcher, custSort); } // make sure the documents returned by the search match the expected list private void MatchHits(Searcher searcher, Sort sort) { // make a query without sorting first ScoreDoc[] hitsByRank = searcher.Search(query, null, 1000).ScoreDocs; CheckHits(hitsByRank, "Sort by rank: "); // check for duplicates System.Collections.IDictionary resultMap = new System.Collections.SortedList(); // store hits in TreeMap - TreeMap does not allow duplicates; existing entries are silently overwritten for (int hitid = 0; hitid < hitsByRank.Length; ++hitid) { resultMap[hitsByRank[hitid].Doc] = hitid; // Value: Hits-Objekt Index } // now make a query using the sort criteria ScoreDoc[] resultSort = searcher.Search(query, null, 1000, sort).ScoreDocs; CheckHits(resultSort, "Sort by custom criteria: "); // check for duplicates // besides the sorting both sets of hits must be identical for (int hitid = 0; hitid < resultSort.Length; ++hitid) { System.Int32 idHitDate = (System.Int32) resultSort[hitid].Doc; // document ID from sorted search if (!resultMap.Contains(idHitDate)) { Log("ID " + idHitDate + " not found. Possibliy a duplicate."); } Assert.IsTrue(resultMap.Contains(idHitDate)); // same ID must be in the Map from the rank-sorted search // every hit must appear once in both result sets --> remove it from the Map. // At the end the Map must be empty! 
resultMap.Remove(idHitDate); } if (resultMap.Count == 0) { // log("All hits matched"); } else { Log("Couldn't match " + resultMap.Count + " hits."); } Assert.AreEqual(resultMap.Count, 0); } /// <summary> Check the hits for duplicates.</summary> /// <param name="hits"> /// </param> private void CheckHits(ScoreDoc[] hits, System.String prefix) { if (hits != null) { System.Collections.IDictionary idMap = new System.Collections.SortedList(); for (int docnum = 0; docnum < hits.Length; ++docnum) { int luceneId; luceneId = hits[docnum].Doc; if (idMap.Contains(luceneId)) { System.Text.StringBuilder message = new System.Text.StringBuilder(prefix); message.Append("Duplicate key for hit index = "); message.Append(docnum); message.Append(", previous index = "); message.Append(((System.Int32) idMap[luceneId]).ToString()); message.Append(", Lucene ID = "); message.Append(luceneId); Log(message.ToString()); } else { idMap[luceneId] = docnum; } } } } // Simply write to console - choosen to be independant of log4j etc private void Log(System.String message) { System.Console.Out.WriteLine(message); } public class CustomSearcher:IndexSearcher { private void InitBlock(TestCustomSearcherSort enclosingInstance) { this.enclosingInstance = enclosingInstance; } private TestCustomSearcherSort enclosingInstance; public TestCustomSearcherSort Enclosing_Instance { get { return enclosingInstance; } } private int switcher; /// <param name="directory"> /// </param> /// <throws> IOException </throws> public CustomSearcher(TestCustomSearcherSort enclosingInstance, Directory directory, int switcher) : base(directory, true) { InitBlock(enclosingInstance); this.switcher = switcher; } /// <param name="r"> /// </param> public CustomSearcher(TestCustomSearcherSort enclosingInstance, IndexReader r, int switcher):base(r) { InitBlock(enclosingInstance); this.switcher = switcher; } /* (non-Javadoc) * @see Lucene.Net.Search.Searchable#search(Lucene.Net.Search.Query, Lucene.Net.Search.Filter, int, Lucene.Net.Search.Sort) */ public override TopFieldDocs Search(Query query, Filter filter, int nDocs, Sort sort) { BooleanQuery bq = new BooleanQuery(); bq.Add(query, Occur.MUST); bq.Add(new TermQuery(new Term("mandant", System.Convert.ToString(switcher))), Occur.MUST); return base.Search(bq, filter, nDocs, sort); } /* (non-Javadoc) * @see Lucene.Net.Search.Searchable#search(Lucene.Net.Search.Query, Lucene.Net.Search.Filter, int) */ public override TopDocs Search(Query query, Filter filter, int nDocs) { BooleanQuery bq = new BooleanQuery(); bq.Add(query, Occur.MUST); bq.Add(new TermQuery(new Term("mandant", System.Convert.ToString(switcher))), Occur.MUST); return base.Search(bq, filter, nDocs); } } private class RandomGen { private void InitBlock(TestCustomSearcherSort enclosingInstance) { this.enclosingInstance = enclosingInstance; System.DateTime temp_calendar; temp_calendar = new System.DateTime(1980, 1, 1, 0, 0, 0, 0, new System.Globalization.GregorianCalendar()); base_Renamed = temp_calendar; } private TestCustomSearcherSort enclosingInstance; public TestCustomSearcherSort Enclosing_Instance { get { return enclosingInstance; } } internal RandomGen(TestCustomSearcherSort enclosingInstance, System.Random random) { InitBlock(enclosingInstance); this.random = random; } private System.Random random; private System.DateTime base_Renamed; // Just to generate some different Lucene Date strings public /*private*/ System.String GetLuceneDate() { return DateTools.TimeToString((base_Renamed.Ticks / TimeSpan.TicksPerMillisecond) + random.Next() - 
System.Int32.MinValue, DateTools.Resolution.DAY); } } } }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.Globalization; using System.Linq; using System.Threading.Tasks; using Xunit; public static unsafe class DateTimeTests { [Fact] public static void TestConstructors() { DateTime dt = new DateTime(2012, 6, 11); ValidateYearMonthDay(dt, 2012, 6, 11); dt = new DateTime(2012, 12, 31, 13, 50, 10); ValidateYearMonthDay(dt, 2012, 12, 31, 13, 50, 10); dt = new DateTime(1973, 10, 6, 14, 30, 0, 500); ValidateYearMonthDay(dt, 1973, 10, 6, 14, 30, 0, 500); dt = new DateTime(1986, 8, 15, 10, 20, 5, DateTimeKind.Local); ValidateYearMonthDay(dt, 1986, 8, 15, 10, 20, 5); } [Fact] public static void TestDateTimeLimits() { DateTime dt = DateTime.MaxValue; ValidateYearMonthDay(dt, 9999, 12, 31); dt = DateTime.MinValue; ValidateYearMonthDay(dt, 1, 1, 1); } [Fact] public static void TestLeapYears() { Assert.Equal(true, DateTime.IsLeapYear(2004)); Assert.Equal(false, DateTime.IsLeapYear(2005)); } [Fact] public static void TestAddition() { DateTime dt = new DateTime(1986, 8, 15, 10, 20, 5, 70); Assert.Equal(17, dt.AddDays(2).Day); Assert.Equal(13, dt.AddDays(-2).Day); Assert.Equal(10, dt.AddMonths(2).Month); Assert.Equal(6, dt.AddMonths(-2).Month); Assert.Equal(1996, dt.AddYears(10).Year); Assert.Equal(1976, dt.AddYears(-10).Year); Assert.Equal(13, dt.AddHours(3).Hour); Assert.Equal(7, dt.AddHours(-3).Hour); Assert.Equal(25, dt.AddMinutes(5).Minute); Assert.Equal(15, dt.AddMinutes(-5).Minute); Assert.Equal(35, dt.AddSeconds(30).Second); Assert.Equal(2, dt.AddSeconds(-3).Second); Assert.Equal(80, dt.AddMilliseconds(10).Millisecond); Assert.Equal(60, dt.AddMilliseconds(-10).Millisecond); } [Fact] public static void TestDayOfWeek() { DateTime dt = new DateTime(2012, 6, 18); Assert.Equal(DayOfWeek.Monday, dt.DayOfWeek); } [Fact] public static void TestTimeSpan() { DateTime dt = new DateTime(2012, 6, 18, 10, 5, 1, 0); TimeSpan ts = dt.TimeOfDay; DateTime newDate = dt.Subtract(ts); Assert.Equal(new DateTime(2012, 6, 18, 0, 0, 0, 0).Ticks, newDate.Ticks); Assert.Equal(dt.Ticks, newDate.Add(ts).Ticks); } [Fact] public static void TestToday() { DateTime today = DateTime.Today; DateTime now = DateTime.Now; ValidateYearMonthDay(today, now.Year, now.Month, now.Day); today = new DateTime(now.Year, now.Month, now.Day, now.Hour, now.Minute, now.Second, DateTimeKind.Utc); Assert.Equal(DateTimeKind.Utc, today.Kind); Assert.Equal(false, today.IsDaylightSavingTime()); } [Fact] public static void TestCoversion() { DateTime today = DateTime.Today; long dateTimeRaw = today.ToBinary(); Assert.Equal(today, DateTime.FromBinary(dateTimeRaw)); dateTimeRaw = today.ToFileTime(); Assert.Equal(today, DateTime.FromFileTime(dateTimeRaw)); dateTimeRaw = today.ToFileTimeUtc(); Assert.Equal(today, DateTime.FromFileTimeUtc(dateTimeRaw).ToLocalTime()); } [Fact] public static void TestOperators() { System.DateTime date1 = new System.DateTime(1996, 6, 3, 22, 15, 0); System.DateTime date2 = new System.DateTime(1996, 12, 6, 13, 2, 0); System.DateTime date3 = new System.DateTime(1996, 10, 12, 8, 42, 0); // diff1 gets 185 days, 14 hours, and 47 minutes. System.TimeSpan diff1 = date2.Subtract(date1); Assert.Equal(new TimeSpan(185, 14, 47, 0), diff1); // date4 gets 4/9/1996 5:55:00 PM. System.DateTime date4 = date3.Subtract(diff1); Assert.Equal(new DateTime(1996, 4, 9, 17, 55, 0), date4); // diff2 gets 55 days 4 hours and 20 minutes. 
System.TimeSpan diff2 = date2 - date3; Assert.Equal(new TimeSpan(55, 4, 20, 0), diff2); // date5 gets 4/9/1996 5:55:00 PM. System.DateTime date5 = date1 - diff2; Assert.Equal(new DateTime(1996, 4, 9, 17, 55, 0), date5); } [Fact] public static void TestParsingDateTimeWithTimeDesignator() { DateTime result; Assert.True(DateTime.TryParse("4/21 5am", new CultureInfo("en-US"), DateTimeStyles.None, out result)); Assert.Equal(4, result.Month); Assert.Equal(21, result.Day); Assert.Equal(5, result.Hour); Assert.True(DateTime.TryParse("4/21 5pm", new CultureInfo("en-US"), DateTimeStyles.None, out result)); Assert.Equal(4, result.Month); Assert.Equal(21, result.Day); Assert.Equal(17, result.Hour); } public class MyFormater : IFormatProvider { public object GetFormat(Type formatType) { if (typeof(IFormatProvider) == formatType) { return this; } else { return null; } } } [Fact] public static void TestParseWithAdjustToUniversal() { var formater = new MyFormater(); var dateBefore = DateTime.Now.ToString(); var dateAfter = DateTime.ParseExact(dateBefore, "G", formater, DateTimeStyles.AdjustToUniversal); Assert.Equal(dateBefore, dateAfter.ToString()); } [Fact] public static void TestFormatParse() { DateTime dt = new DateTime(2012, 12, 21, 10, 8, 6); CultureInfo ci = new CultureInfo("ja-JP"); string s = string.Format(ci, "{0}", dt); Assert.Equal(dt, DateTime.Parse(s, ci)); } [Fact] public static void TestParse1() { DateTime src = DateTime.MaxValue; String s = src.ToString(); DateTime in_1 = DateTime.Parse(s); String actual = in_1.ToString(); Assert.Equal(s, actual); } [Fact] public static void TestParse2() { DateTime src = DateTime.MaxValue; String s = src.ToString(); DateTime in_1 = DateTime.Parse(s, null); String actual = in_1.ToString(); Assert.Equal(s, actual); } [Fact] public static void TestParse3() { DateTime src = DateTime.MaxValue; String s = src.ToString(); DateTime in_1 = DateTime.Parse(s, null, DateTimeStyles.None); String actual = in_1.ToString(); Assert.Equal(s, actual); } [Fact] public static void TestParseExact3() { DateTime src = DateTime.MaxValue; String s = src.ToString("g"); DateTime in_1 = DateTime.ParseExact(s, "g", null); String actual = in_1.ToString("g"); Assert.Equal(s, actual); } [Fact] public static void TestParseExact4() { DateTime src = DateTime.MaxValue; String s = src.ToString("g"); DateTime in_1 = DateTime.ParseExact(s, "g", null, DateTimeStyles.None); String actual = in_1.ToString("g"); Assert.Equal(s, actual); } [Fact] public static void TestParseExact4a() { DateTime src = DateTime.MaxValue; String s = src.ToString("g"); String[] formats = { "g" }; DateTime in_1 = DateTime.ParseExact(s, formats, null, DateTimeStyles.None); String actual = in_1.ToString("g"); Assert.Equal(s, actual); } [Fact] public static void TestTryParse2() { DateTime src = DateTime.MaxValue; String s = src.ToString("g"); DateTime in_1; bool b = DateTime.TryParse(s, out in_1); Assert.True(b); String actual = in_1.ToString("g"); Assert.Equal(s, actual); } [Fact] public static void TestTryParse4() { DateTime src = DateTime.MaxValue; String s = src.ToString("g"); DateTime in_1; bool b = DateTime.TryParse(s, null, DateTimeStyles.None, out in_1); Assert.True(b); String actual = in_1.ToString("g"); Assert.Equal(s, actual); } [Fact] public static void TestTryParseExact() { DateTime src = DateTime.MaxValue; String s = src.ToString("g"); DateTime in_1; bool b = DateTime.TryParseExact(s, "g", null, DateTimeStyles.None, out in_1); Assert.True(b); String actual = in_1.ToString("g"); Assert.Equal(s, actual); } [Fact] 
public static void TestTryParseExactA() { DateTime src = DateTime.MaxValue; String s = src.ToString("g"); String[] formats = { "g" }; DateTime in_1; bool b = DateTime.TryParseExact(s, formats, null, DateTimeStyles.None, out in_1); Assert.True(b); String actual = in_1.ToString("g"); Assert.Equal(s, actual); } [Fact] public static void TestGetDateTimeFormats() { char[] allStandardFormats = { 'd', 'D', 'f', 'F', 'g', 'G', 'm', 'M', 'o', 'O', 'r', 'R', 's', 't', 'T', 'u', 'U', 'y', 'Y', }; DateTime july28 = new DateTime(2009, 7, 28, 5, 23, 15); List<string> july28Formats = new List<string>(); foreach (char format in allStandardFormats) { string[] dates = july28.GetDateTimeFormats(format); Assert.True(dates.Length > 0); DateTime parsedDate; Assert.True(DateTime.TryParseExact(dates[0], format.ToString(), CultureInfo.CurrentCulture, DateTimeStyles.None, out parsedDate)); july28Formats.AddRange(dates); } List<string> actualJuly28Formats = july28.GetDateTimeFormats().ToList(); Assert.Equal(july28Formats.OrderBy(t => t), actualJuly28Formats.OrderBy(t => t)); actualJuly28Formats = july28.GetDateTimeFormats(CultureInfo.CurrentCulture).ToList(); Assert.Equal(july28Formats.OrderBy(t => t), actualJuly28Formats.OrderBy(t => t)); } [Fact] public static void TestGetDateTimeFormats_FormatSpecifier_InvalidFormat() { DateTime july28 = new DateTime(2009, 7, 28, 5, 23, 15); Assert.Throws<FormatException>(() => july28.GetDateTimeFormats('x')); } internal static void ValidateYearMonthDay(DateTime dt, int year, int month, int day) { Assert.Equal(dt.Year, year); Assert.Equal(dt.Month, month); Assert.Equal(dt.Day, day); } internal static void ValidateYearMonthDay(DateTime dt, int year, int month, int day, int hour, int minute, int second) { ValidateYearMonthDay(dt, year, month, day); Assert.Equal(dt.Hour, hour); Assert.Equal(dt.Minute, minute); Assert.Equal(dt.Second, second); } internal static void ValidateYearMonthDay(DateTime dt, int year, int month, int day, int hour, int minute, int second, int millisecond) { ValidateYearMonthDay(dt, year, month, day, hour, minute, second); Assert.Equal(dt.Millisecond, millisecond); } }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections.Immutable; using System.Diagnostics; using System.IO; using System.Reflection.Internal; using System.Reflection.Metadata; using System.Threading; namespace System.Reflection.PortableExecutable { /// <summary> /// Portable Executable format reader. /// </summary> /// <remarks> /// The implementation is thread-safe, that is multiple threads can read data from the reader in parallel. /// Disposal of the reader is not thread-safe (see <see cref="Dispose"/>). /// </remarks> public sealed class PEReader : IDisposable { // May be null in the event that the entire image is not // deemed necessary and we have been instructed to read // the image contents without being lazy. private MemoryBlockProvider _peImage; // If we read the data from the image lazily (peImage != null) we defer reading the PE headers. private PEHeaders _lazyPEHeaders; private AbstractMemoryBlock _lazyMetadataBlock; private AbstractMemoryBlock _lazyImageBlock; private AbstractMemoryBlock[] _lazyPESectionBlocks; /// <summary> /// Creates a Portable Executable reader over a PE image stored in memory. /// </summary> /// <param name="peImage">Pointer to the start of the PE image.</param> /// <param name="size">The size of the PE image.</param> /// <exception cref="ArgumentNullException"><paramref name="peImage"/> is <see cref="IntPtr.Zero"/>.</exception> /// <exception cref="ArgumentOutOfRangeException"><paramref name="size"/> is negative.</exception> /// <remarks> /// The memory is owned by the caller and not released on disposal of the <see cref="PEReader"/>. /// The caller is responsible for keeping the memory alive and unmodified throughout the lifetime of the <see cref="PEReader"/>. /// The content of the image is not read during the construction of the <see cref="PEReader"/> /// </remarks> public unsafe PEReader(byte* peImage, int size) { if (peImage == null) { throw new ArgumentNullException("peImage"); } if (size < 0) { throw new ArgumentOutOfRangeException("size"); } _peImage = new ExternalMemoryBlockProvider(peImage, size); } /// <summary> /// Creates a Portable Executable reader over a PE image stored in a stream. /// </summary> /// <param name="peStream">PE image stream.</param> /// <exception cref="ArgumentNullException"><paramref name="peStream"/> is null.</exception> /// <exception cref="BadImageFormatException"> /// <see cref="PEStreamOptions.PrefetchMetadata"/> is specified and the PE headers of the image are invalid. /// </exception> /// <remarks> /// Ownership of the stream is transferred to the <see cref="PEReader"/> upon successful validation of constructor arguments. It will be /// disposed by the <see cref="PEReader"/> and the caller must not manipulate it. /// </remarks> public PEReader(Stream peStream) : this(peStream, PEStreamOptions.Default) { } /// <summary> /// Creates a Portable Executable reader over a PE image stored in a stream beginning at its current position and ending at the end of the stream. /// </summary> /// <param name="peStream">PE image stream.</param> /// <param name="options"> /// Options specifying how sections of the PE image are read from the stream. /// /// Unless <see cref="PEStreamOptions.LeaveOpen"/> is specified, ownership of the stream is transferred to the <see cref="PEReader"/> /// upon successful argument validation. 
It will be disposed by the <see cref="PEReader"/> and the caller must not manipulate it. /// /// Unless <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/> is specified no data /// is read from the stream during the construction of the <see cref="PEReader"/>. Furthermore, the stream must not be manipulated /// by caller while the <see cref="PEReader"/> is alive and undisposed. /// /// If <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/>, the <see cref="PEReader"/> /// will have read all of the data requested during construction. As such, if <see cref="PEStreamOptions.LeaveOpen"/> is also /// specified, the caller retains full ownership of the stream and is assured that it will not be manipulated by the <see cref="PEReader"/> /// after construction. /// </param> /// <exception cref="ArgumentNullException"><paramref name="peStream"/> is null.</exception> /// <exception cref="ArgumentOutOfRangeException"><paramref name="options"/> has an invalid value.</exception> /// <exception cref="BadImageFormatException"> /// <see cref="PEStreamOptions.PrefetchMetadata"/> is specified and the PE headers of the image are invalid. /// </exception> public PEReader(Stream peStream, PEStreamOptions options) : this(peStream, options, (int?)null) { } /// <summary> /// Creates a Portable Executable reader over a PE image of the given size beginning at the stream's current position. /// </summary> /// <param name="peStream">PE image stream.</param> /// <param name="size">PE image size.</param> /// <param name="options"> /// Options specifying how sections of the PE image are read from the stream. /// /// Unless <see cref="PEStreamOptions.LeaveOpen"/> is specified, ownership of the stream is transferred to the <see cref="PEReader"/> /// upon successful argument validation. It will be disposed by the <see cref="PEReader"/> and the caller must not manipulate it. /// /// Unless <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/> is specified no data /// is read from the stream during the construction of the <see cref="PEReader"/>. Furthermore, the stream must not be manipulated /// by caller while the <see cref="PEReader"/> is alive and undisposed. /// /// If <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/>, the <see cref="PEReader"/> /// will have read all of the data requested during construction. As such, if <see cref="PEStreamOptions.LeaveOpen"/> is also /// specified, the caller retains full ownership of the stream and is assured that it will not be manipulated by the <see cref="PEReader"/> /// after construction. /// </param> /// <exception cref="ArgumentOutOfRangeException">Size is negative or extends past the end of the stream.</exception> public PEReader(Stream peStream, PEStreamOptions options, int size) : this(peStream, options, (int?)size) { } private unsafe PEReader(Stream peStream, PEStreamOptions options, int? 
sizeOpt) { if (peStream == null) { throw new ArgumentNullException("peStream"); } if (!peStream.CanRead || !peStream.CanSeek) { throw new ArgumentException(MetadataResources.StreamMustSupportReadAndSeek, "peStream"); } if (!options.IsValid()) { throw new ArgumentOutOfRangeException("options"); } long start = peStream.Position; int size = PEBinaryReader.GetAndValidateSize(peStream, sizeOpt); bool closeStream = true; try { bool isFileStream = FileStreamReadLightUp.IsFileStream(peStream); if ((options & (PEStreamOptions.PrefetchMetadata | PEStreamOptions.PrefetchEntireImage)) == 0) { _peImage = new StreamMemoryBlockProvider(peStream, start, size, isFileStream, (options & PEStreamOptions.LeaveOpen) != 0); closeStream = false; } else { // Read in the entire image or metadata blob: if ((options & PEStreamOptions.PrefetchEntireImage) != 0) { var imageBlock = StreamMemoryBlockProvider.ReadMemoryBlockNoLock(peStream, isFileStream, 0, (int)Math.Min(peStream.Length, int.MaxValue)); _lazyImageBlock = imageBlock; _peImage = new ExternalMemoryBlockProvider(imageBlock.Pointer, imageBlock.Size); // if the caller asked for metadata initialize the PE headers (calculates metadata offset): if ((options & PEStreamOptions.PrefetchMetadata) != 0) { InitializePEHeaders(); } } else { // The peImage is left null, but the lazyMetadataBlock is initialized up front. _lazyPEHeaders = new PEHeaders(peStream); _lazyMetadataBlock = StreamMemoryBlockProvider.ReadMemoryBlockNoLock(peStream, isFileStream, _lazyPEHeaders.MetadataStartOffset, _lazyPEHeaders.MetadataSize); } // We read all we need, the stream is going to be closed. } } finally { if (closeStream && (options & PEStreamOptions.LeaveOpen) == 0) { peStream.Dispose(); } } } /// <summary> /// Creates a Portable Executable reader over a PE image stored in a byte array. /// </summary> /// <param name="peImage">PE image.</param> /// <remarks> /// The content of the image is not read during the construction of the <see cref="PEReader"/> /// </remarks> /// <exception cref="ArgumentNullException"><paramref name="peImage"/> is null.</exception> public PEReader(ImmutableArray<byte> peImage) { if (peImage.IsDefault) { throw new ArgumentNullException("peImage"); } _peImage = new ByteArrayMemoryProvider(peImage); } /// <summary> /// Disposes all memory allocated by the reader. /// </summary> /// <remarks> /// <see cref="Dispose"/> can be called multiple times (even in parallel). /// However, it is not safe to call <see cref="Dispose"/> in parallel with any other operation on the <see cref="PEReader"/> /// or reading from <see cref="PEMemoryBlock"/>s retrieved from the reader. /// </remarks> public void Dispose() { var image = _peImage; if (image != null) { image.Dispose(); _peImage = null; } var imageBlock = _lazyImageBlock; if (imageBlock != null) { imageBlock.Dispose(); _lazyImageBlock = null; } var metadataBlock = _lazyMetadataBlock; if (metadataBlock != null) { metadataBlock.Dispose(); _lazyMetadataBlock = null; } var peSectionBlocks = _lazyPESectionBlocks; if (peSectionBlocks != null) { foreach (var block in peSectionBlocks) { if (block != null) { block.Dispose(); } } _lazyPESectionBlocks = null; } } /// <summary> /// Gets the PE headers. 
/// </summary> /// <exception cref="BadImageFormatException">The headers contain invalid data.</exception> public PEHeaders PEHeaders { get { if (_lazyPEHeaders == null) { InitializePEHeaders(); } return _lazyPEHeaders; } } private void InitializePEHeaders() { Debug.Assert(_peImage != null); StreamConstraints constraints; Stream stream = _peImage.GetStream(out constraints); PEHeaders headers; if (constraints.GuardOpt != null) { lock (constraints.GuardOpt) { headers = ReadPEHeadersNoLock(stream, constraints.ImageStart, constraints.ImageSize); } } else { headers = ReadPEHeadersNoLock(stream, constraints.ImageStart, constraints.ImageSize); } Interlocked.CompareExchange(ref _lazyPEHeaders, headers, null); } private static PEHeaders ReadPEHeadersNoLock(Stream stream, long imageStartPosition, int imageSize) { Debug.Assert(imageStartPosition >= 0 && imageStartPosition <= stream.Length); stream.Seek(imageStartPosition, SeekOrigin.Begin); return new PEHeaders(stream, imageSize); } /// <summary> /// Returns a view of the entire image as a pointer and length. /// </summary> /// <exception cref="InvalidOperationException">PE image not available.</exception> private AbstractMemoryBlock GetEntireImageBlock() { if (_lazyImageBlock == null) { if (_peImage == null) { throw new InvalidOperationException(MetadataResources.PEImageNotAvailable); } var newBlock = _peImage.GetMemoryBlock(); if (Interlocked.CompareExchange(ref _lazyImageBlock, newBlock, null) != null) { // another thread created the block already, we need to dispose ours: newBlock.Dispose(); } } return _lazyImageBlock; } private AbstractMemoryBlock GetMetadataBlock() { if (!HasMetadata) { throw new InvalidOperationException(MetadataResources.PEImageDoesNotHaveMetadata); } if (_lazyMetadataBlock == null) { Debug.Assert(_peImage != null, "We always have metadata if peImage is not available."); var newBlock = _peImage.GetMemoryBlock(PEHeaders.MetadataStartOffset, PEHeaders.MetadataSize); if (Interlocked.CompareExchange(ref _lazyMetadataBlock, newBlock, null) != null) { // another thread created the block already, we need to dispose ours: newBlock.Dispose(); } } return _lazyMetadataBlock; } private AbstractMemoryBlock GetPESectionBlock(int index) { Debug.Assert(index >= 0 && index < PEHeaders.SectionHeaders.Length); Debug.Assert(_peImage != null); if (_lazyPESectionBlocks == null) { Interlocked.CompareExchange(ref _lazyPESectionBlocks, new AbstractMemoryBlock[PEHeaders.SectionHeaders.Length], null); } var newBlock = _peImage.GetMemoryBlock( PEHeaders.SectionHeaders[index].PointerToRawData, PEHeaders.SectionHeaders[index].SizeOfRawData); if (Interlocked.CompareExchange(ref _lazyPESectionBlocks[index], newBlock, null) != null) { // another thread created the block already, we need to dispose ours: newBlock.Dispose(); } return _lazyPESectionBlocks[index]; } /// <summary> /// Return true if the reader can access the entire PE image. /// </summary> /// <remarks> /// Returns false if the <see cref="PEReader"/> is constructed from a stream and only part of it is prefetched into memory. /// </remarks> public bool IsEntireImageAvailable { get { return _lazyImageBlock != null || _peImage != null; } } /// <summary> /// Gets a pointer to and size of the PE image if available (<see cref="IsEntireImageAvailable"/>). 
/// </summary> /// <exception cref="InvalidOperationException">The entire PE image is not available.</exception> public PEMemoryBlock GetEntireImage() { return new PEMemoryBlock(GetEntireImageBlock()); } /// <summary> /// Returns true if the PE image contains CLI metadata. /// </summary> /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception> public bool HasMetadata { get { return PEHeaders.MetadataSize > 0; } } /// <summary> /// Loads PE section that contains CLI metadata. /// </summary> /// <exception cref="InvalidOperationException">The PE image doesn't contain metadata (<see cref="HasMetadata"/> returns false).</exception> /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception> public PEMemoryBlock GetMetadata() { return new PEMemoryBlock(GetMetadataBlock()); } /// <summary> /// Loads PE section that contains the specified <paramref name="relativeVirtualAddress"/> into memory /// and returns a memory block that starts at <paramref name="relativeVirtualAddress"/> and ends at the end of the containing section. /// </summary> /// <param name="relativeVirtualAddress">Relative Virtual Address of the data to read.</param> /// <returns> /// An empty block if <paramref name="relativeVirtualAddress"/> doesn't represent a location in any of the PE sections of this PE image. /// </returns> /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception> public PEMemoryBlock GetSectionData(int relativeVirtualAddress) { var sectionIndex = PEHeaders.GetContainingSectionIndex(relativeVirtualAddress); if (sectionIndex < 0) { return default(PEMemoryBlock); } int relativeOffset = relativeVirtualAddress - PEHeaders.SectionHeaders[sectionIndex].VirtualAddress; int size = PEHeaders.SectionHeaders[sectionIndex].VirtualSize - relativeOffset; AbstractMemoryBlock block; if (_peImage != null) { block = GetPESectionBlock(sectionIndex); } else { block = GetEntireImageBlock(); relativeOffset += PEHeaders.SectionHeaders[sectionIndex].PointerToRawData; } return new PEMemoryBlock(block, relativeOffset); } } }
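// Illustrative sketch (not from the sources above): a minimal way to exercise the PEReader members
// defined in this file. The assembly path is a placeholder supplied by the caller, and the snippet
// assumes the System.Reflection.Metadata package is referenced. PrefetchMetadata together with
// LeaveOpen keeps stream ownership with the caller, as the constructor remarks describe.
using System;
using System.IO;
using System.Reflection.PortableExecutable;

internal static class PEReaderUsageSketch
{
    internal static void DumpBasicInfo(string assemblyPath)
    {
        using (FileStream stream = File.OpenRead(assemblyPath))
        using (var reader = new PEReader(stream, PEStreamOptions.PrefetchMetadata | PEStreamOptions.LeaveOpen))
        {
            // PEHeaders is materialized lazily unless a prefetch option forced it during construction.
            PEHeaders headers = reader.PEHeaders;
            Console.WriteLine("Sections: {0}", headers.SectionHeaders.Length);

            if (reader.HasMetadata)
            {
                // GetMetadata returns a view over the CLI metadata blob of the image.
                PEMemoryBlock metadata = reader.GetMetadata();
                Console.WriteLine("Metadata size: {0} bytes", metadata.Length);
            }
        }
    }
}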
using Bridge.Contract.Constants; using Mono.Cecil; using Newtonsoft.Json; using Newtonsoft.Json.Linq; using Object.Net.Utilities; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using Bridge.Contract; namespace Bridge.Translator { public partial class Emitter { protected virtual void WrapToModules() { this.Log.Trace("Wrapping to modules..."); foreach (var outputPair in this.Outputs) { var output = outputPair.Value; foreach (var moduleOutputPair in output.ModuleOutput) { var module = moduleOutputPair.Key; var moduleOutput = moduleOutputPair.Value; this.Log.Trace("Module " + module.Name + " ..."); AbstractEmitterBlock.RemovePenultimateEmptyLines(moduleOutput, true); switch (module.Type) { case ModuleType.CommonJS: this.WrapToCommonJS(moduleOutput, module, output); break; case ModuleType.UMD: this.WrapToUMD(moduleOutput, module, output); break; case ModuleType.ES6: this.WrapToES6(moduleOutput, module, output); break; case ModuleType.AMD: default: this.WrapToAMD(moduleOutput, module, output); break; } } } this.Log.Trace("Wrapping to modules done"); } protected virtual void WrapToAMD(StringBuilder moduleOutput, Module module, IEmitterOutput output) { var str = moduleOutput.ToString(); moduleOutput.Length = 0; this.WriteIndent(moduleOutput, this.InitialLevel); moduleOutput.Append(JS.Funcs.DEFINE + "("); if (!module.NoName) { moduleOutput.Append(this.ToJavaScript(module.OriginalName)); moduleOutput.Append(", "); } var enabledDependecies = this.GetEnabledDependecies(module, output); if (enabledDependecies.Count > 0) { moduleOutput.Append("["); enabledDependecies.Each(md => { moduleOutput.Append(this.ToJavaScript(md.DependencyName)); moduleOutput.Append(", "); }); moduleOutput.Remove(moduleOutput.Length - 2, 2); // remove trailing comma moduleOutput.Append("], "); } moduleOutput.Append("function ("); if (enabledDependecies.Count > 0) { enabledDependecies.Each(md => { moduleOutput.Append(md.VariableName.IsNotEmpty() ? 
md.VariableName : md.DependencyName); moduleOutput.Append(", "); }); moduleOutput.Remove(moduleOutput.Length - 2, 2); // remove trailing comma } this.WriteNewLine(moduleOutput, ") {"); this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, Emitter.INDENT + "var " + module.Name + " = { };"); moduleOutput.Append(str); if (!str.Trim().EndsWith(Emitter.NEW_LINE)) { this.WriteNewLine(moduleOutput); } this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, Emitter.INDENT + "Bridge.init();"); this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, Emitter.INDENT + "return " + module.Name + ";"); this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, "});"); } private List<IPluginDependency> GetEnabledDependecies(Module module, IEmitterOutput output) { var dependencies = output.ModuleDependencies; var loader = this.AssemblyInfo.Loader; if (dependencies.ContainsKey(module.Name) && dependencies[module.Name].Count > 0) { return dependencies[module.Name].Where(d => !loader.IsManual(d.DependencyName)).ToList(); } return new List<IPluginDependency>(); } protected virtual void WrapToCommonJS(StringBuilder moduleOutput, Module module, IEmitterOutput output) { var str = moduleOutput.ToString(); moduleOutput.Length = 0; moduleOutput.Append(Emitter.INDENT); moduleOutput.Append("(function ("); var enabledDependecies = this.GetEnabledDependecies(module, output); if (enabledDependecies.Count > 0) { enabledDependecies.Each(md => { moduleOutput.Append(md.VariableName.IsNotEmpty() ? md.VariableName : md.DependencyName); moduleOutput.Append(", "); }); moduleOutput.Remove(moduleOutput.Length - 2, 2); // remove trailing comma } this.WriteNewLine(moduleOutput, ") {"); moduleOutput.Append(Emitter.INDENT); this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, "var " + module.Name + " = { };"); moduleOutput.Append(str); if (!str.Trim().EndsWith(Emitter.NEW_LINE)) { this.WriteNewLine(moduleOutput); } this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, Emitter.INDENT + "module.exports." 
+ module.Name + " = " + module.Name + ";"); this.WriteIndent(moduleOutput, this.InitialLevel); moduleOutput.Append("}) ("); if (enabledDependecies.Count > 0) { enabledDependecies.Each(md => { moduleOutput.Append("require(" + this.ToJavaScript(md.DependencyName) + "), "); }); moduleOutput.Remove(moduleOutput.Length - 2, 2); // remove trailing comma } this.WriteNewLine(moduleOutput, ");"); } protected virtual void WrapToUMD(StringBuilder moduleOutput, Module module, IEmitterOutput output) { var str = moduleOutput.ToString(); moduleOutput.Length = 0; this.WriteIndent(moduleOutput, 1); this.WriteNewLine(moduleOutput, "(function (root, factory) {"); this.WriteIndent(moduleOutput, 2); this.WriteNewLine(moduleOutput, "if (typeof define === 'function' && define.amd) {"); this.WriteIndent(moduleOutput, 3); moduleOutput.Append(JS.Funcs.DEFINE + "("); if (!module.NoName) { moduleOutput.Append(this.ToJavaScript(module.OriginalName)); moduleOutput.Append(", "); } var enabledDependecies = this.GetEnabledDependecies(module, output); if (enabledDependecies.Count > 0) { moduleOutput.Append("["); enabledDependecies.Each(md => { moduleOutput.Append(this.ToJavaScript(md.DependencyName)); moduleOutput.Append(", "); }); moduleOutput.Remove(moduleOutput.Length - 2, 2); // remove trailing comma moduleOutput.Append("], "); } this.WriteNewLine(moduleOutput, "factory);"); this.WriteIndent(moduleOutput, 2); this.WriteNewLine(moduleOutput, "} else if (typeof module === 'object' && module.exports) {"); this.WriteIndent(moduleOutput, 3); moduleOutput.Append("module.exports = factory("); if (enabledDependecies.Count > 0) { enabledDependecies.Each(md => { moduleOutput.Append("require(" + this.ToJavaScript(md.DependencyName) + "), "); }); moduleOutput.Remove(moduleOutput.Length - 2, 2); } this.WriteNewLine(moduleOutput, ");"); this.WriteIndent(moduleOutput, 2); this.WriteNewLine(moduleOutput, "} else {"); this.WriteIndent(moduleOutput, 3); moduleOutput.Append("root[" + this.ToJavaScript(module.OriginalName) + "] = factory("); if (enabledDependecies.Count > 0) { enabledDependecies.Each(md => { moduleOutput.Append("root[" + this.ToJavaScript(md.DependencyName) + "], "); }); moduleOutput.Remove(moduleOutput.Length - 2, 2); // remove trailing comma } this.WriteNewLine(moduleOutput, ");"); this.WriteIndent(moduleOutput, 2); this.WriteNewLine(moduleOutput, "}"); this.WriteIndent(moduleOutput, 1); moduleOutput.Append("}(this, function ("); if (enabledDependecies.Count > 0) { enabledDependecies.Each(md => { moduleOutput.Append(md.VariableName ?? 
md.DependencyName); moduleOutput.Append(", "); }); moduleOutput.Remove(moduleOutput.Length - 2, 2); // remove trailing comma } moduleOutput.Append(") {"); this.WriteNewLine(moduleOutput); this.WriteIndent(moduleOutput, 2); this.WriteNewLine(moduleOutput, "var " + module.Name + " = { };"); moduleOutput.Append(str); if (!str.Trim().EndsWith(Emitter.NEW_LINE)) { this.WriteNewLine(moduleOutput); } this.WriteIndent(moduleOutput, 2); this.WriteNewLine(moduleOutput, "Bridge.init();"); this.WriteIndent(moduleOutput, 2); this.WriteNewLine(moduleOutput, "return " + module.Name + ";"); this.WriteIndent(moduleOutput, 1); this.WriteNewLine(moduleOutput, "}));"); } protected virtual void WrapToES6(StringBuilder moduleOutput, Module module, IEmitterOutput output) { var str = moduleOutput.ToString(); moduleOutput.Length = 0; moduleOutput.Append(Emitter.INDENT); this.WriteNewLine(moduleOutput, "(function () {"); moduleOutput.Append(Emitter.INDENT); this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, "var " + module.Name + " = { };"); var enabledDependecies = this.GetEnabledDependecies(module, output); if (enabledDependecies.Count > 0) { enabledDependecies.Each(md => { moduleOutput.Append(Emitter.INDENT); this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, "import " + (md.VariableName.IsNotEmpty() ? md.VariableName : md.DependencyName) + " from " + this.ToJavaScript(md.DependencyName) + ";"); }); } moduleOutput.Append(str); if (!str.Trim().EndsWith(Emitter.NEW_LINE)) { this.WriteNewLine(moduleOutput); } this.WriteIndent(moduleOutput, this.InitialLevel); this.WriteNewLine(moduleOutput, Emitter.INDENT + "export {" + module.Name + "};"); this.WriteIndent(moduleOutput, this.InitialLevel); moduleOutput.Append("}) ("); this.WriteNewLine(moduleOutput, ");"); } } }
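// Illustrative sketch (not part of the emitter sources): the approximate JavaScript shape that
// WrapToAMD produces, assuming JS.Funcs.DEFINE is "define", for a hypothetical module named
// "MyModule" with a single dependency "dep1" bound to the variable "Dep1". Indentation and the
// module body are placeholders; the wrapper header, Bridge.init() call, and return statement
// follow the emitter code above.
internal static class AmdWrapperShapeSketch
{
    internal const string ExpectedShape =
@"define(""MyModule"", [""dep1""], function (Dep1) {
    var MyModule = { };
    // ...emitted type definitions...
    Bridge.init();
    return MyModule;
});";
}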
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using log4net; using System; using System.IO; using System.Reflection; namespace OpenSim.Framework { // The terrain is stored in the database as a blob with a 'revision' field. // Some implementations of terrain storage would fill the revision field with // the time the terrain was stored. When real revisions were added and this // feature removed, that left some old entries with the time in the revision // field. // Thus, if revision is greater than 'RevisionHigh' then terrain db entry is // left over and it is presumed to be 'Legacy256'. // Numbers are arbitrary and are chosen to reduce possible mis-interpretation. // If a revision does not match any of these, it is assumed to be Legacy256. public enum DBTerrainRevision { // Terrain is 'double[256,256]' Legacy256 = 11, // Terrain is 'int32, int32, float[,]' where the ints are X and Y dimensions // The dimensions are presumed to be multiples of 16 and, more likely, multiples of 256. Variable2D = 22, // Terrain is 'int32, int32, int32, int16[]' where the ints are X and Y dimensions // and the third int is the 'compression factor'. The heights are compressed as // "int compressedHeight = (int)(height * compressionFactor);" // The dimensions are presumed to be multiples of 16 and, more likely, multiples of 256. Compressed2D = 27, // A revision that is not listed above or any revision greater than this value is 'Legacy256'. RevisionHigh = 1234 } // Version of terrain that is a heightmap. // This should really be 'LLOptimizedHeightmapTerrainData' as it includes knowledge // of 'patches' which are 16x16 terrain areas which can be sent separately to the viewer. // The heightmap is kept as an array of integers. The integer values are converted to // and from floats by TerrainCompressionFactor.
public class HeightmapTerrainData : TerrainData { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private static string LogHeader = "[HEIGHTMAP TERRAIN DATA]"; // TerrainData.CompressionFactor private float m_compressionFactor = 100.0f; private int[,] m_heightmap; // ============================================================= // Remember subregions of the heightmap that have changed. private bool[,] m_taint; // To keep with the legacy theme, create an instance of this class based on the // way terrain used to be passed around. public HeightmapTerrainData(double[,] pTerrain) { SizeX = pTerrain.GetLength(0); SizeY = pTerrain.GetLength(1); SizeZ = (int)Constants.RegionHeight; m_compressionFactor = 100.0f; m_heightmap = new int[SizeX, SizeY]; for (int ii = 0; ii < SizeX; ii++) { for (int jj = 0; jj < SizeY; jj++) { m_heightmap[ii, jj] = ToCompressedHeight(pTerrain[ii, jj]); } } // m_log.DebugFormat("{0} new by doubles. sizeX={1}, sizeY={2}, sizeZ={3}", LogHeader, SizeX, SizeY, SizeZ); m_taint = new bool[SizeX / Constants.TerrainPatchSize, SizeY / Constants.TerrainPatchSize]; ClearTaint(); } // Create underlying structures but don't initialize the heightmap, assuming the caller will immediately do that public HeightmapTerrainData(int pX, int pY, int pZ) { SizeX = pX; SizeY = pY; SizeZ = pZ; m_compressionFactor = 100.0f; m_heightmap = new int[SizeX, SizeY]; m_taint = new bool[SizeX / Constants.TerrainPatchSize, SizeY / Constants.TerrainPatchSize]; // m_log.DebugFormat("{0} new by dimensions. sizeX={1}, sizeY={2}, sizeZ={3}", LogHeader, SizeX, SizeY, SizeZ); ClearTaint(); ClearLand(0f); } public HeightmapTerrainData(int[] cmap, float pCompressionFactor, int pX, int pY, int pZ) : this(pX, pY, pZ) { m_compressionFactor = pCompressionFactor; int ind = 0; for (int xx = 0; xx < SizeX; xx++) for (int yy = 0; yy < SizeY; yy++) m_heightmap[xx, yy] = cmap[ind++]; // m_log.DebugFormat("{0} new by compressed map. sizeX={1}, sizeY={2}, sizeZ={3}", LogHeader, SizeX, SizeY, SizeZ); } // Create a heightmap from a database blob public HeightmapTerrainData(int pSizeX, int pSizeY, int pSizeZ, int pFormatCode, byte[] pBlob) : this(pSizeX, pSizeY, pSizeZ) { switch ((DBTerrainRevision)pFormatCode) { case DBTerrainRevision.Compressed2D: FromCompressedTerrainSerialization(pBlob); m_log.DebugFormat("{0} HeightmapTerrainData create from Compressed2D serialization. Size=<{1},{2}>", LogHeader, SizeX, SizeY); break; default: FromLegacyTerrainSerialization(pBlob); m_log.DebugFormat("{0} HeightmapTerrainData create from legacy serialization.
Size=<{1},{2}>", LogHeader, SizeX, SizeY); break; } } public override float CompressionFactor { get { return m_compressionFactor; } } // TerrainData.this[x, y] public override float this[int x, int y] { get { return FromCompressedHeight(m_heightmap[x, y]); } set { int newVal = ToCompressedHeight(value); if (m_heightmap[x, y] != newVal) { m_heightmap[x, y] = newVal; m_taint[x / Constants.TerrainPatchSize, y / Constants.TerrainPatchSize] = true; } } } // TerrainData.this[x, y, z] public override float this[int x, int y, int z] { get { return this[x, y]; } set { this[x, y] = value; } } // TerrainData.ClearLand public override void ClearLand() { ClearLand(DefaultTerrainHeight); } // TerrainData.ClearLand(float) public override void ClearLand(float pHeight) { int flatHeight = ToCompressedHeight(pHeight); for (int xx = 0; xx < SizeX; xx++) for (int yy = 0; yy < SizeY; yy++) m_heightmap[xx, yy] = flatHeight; } // TerrainData.ClearTaint public override void ClearTaint() { SetAllTaint(false); } // TerrainData.Clone public override TerrainData Clone() { HeightmapTerrainData ret = new HeightmapTerrainData(SizeX, SizeY, SizeZ); ret.m_heightmap = (int[,])this.m_heightmap.Clone(); return ret; } public float FromCompressedHeight(int pHeight) { return ((float)pHeight) / CompressionFactor; } // Initialize heightmap from blob consisting of: // int32, int32, int32, int32, int16[] // where the first int32 is the format code, the next two int32s are the X and Y of the heightmap data and // the fourth int is the compression factor for the following int16s // This just sets heightmap info. The actual size of the region was set on this instance's // creation and any heights not initialized by this blob are set to the default height. public void FromCompressedTerrainSerialization(byte[] pBlob) { Int32 hmFormatCode, hmSizeX, hmSizeY, hmCompressionFactor; using (MemoryStream mstr = new MemoryStream(pBlob)) { using (BinaryReader br = new BinaryReader(mstr)) { hmFormatCode = br.ReadInt32(); hmSizeX = br.ReadInt32(); hmSizeY = br.ReadInt32(); hmCompressionFactor = br.ReadInt32(); m_compressionFactor = hmCompressionFactor; // In case database info doesn't match real terrain size, initialize the whole terrain. ClearLand(); for (int yy = 0; yy < hmSizeY; yy++) { for (int xx = 0; xx < hmSizeX; xx++) { Int16 val = br.ReadInt16(); if (xx < SizeX && yy < SizeY) m_heightmap[xx, yy] = val; } } } ClearTaint(); m_log.InfoFormat("{0} Read compressed 2d heightmap. Heightmap size=<{1},{2}>. Region size=<{3},{4}>. CompFact={5}", LogHeader, hmSizeX, hmSizeY, SizeX, SizeY, hmCompressionFactor); } } // Just read in an array of doubles. Presumes the caller implicitly knows the size. public void FromLegacyTerrainSerialization(byte[] pBlob) { // In case database info doesn't match real terrain size, initialize the whole terrain. ClearLand(); using (MemoryStream mstr = new MemoryStream(pBlob)) { using (BinaryReader br = new BinaryReader(mstr)) { for (int xx = 0; xx < (int)Constants.RegionSize; xx++) { for (int yy = 0; yy < (int)Constants.RegionSize; yy++) { float val = (float)br.ReadDouble(); if (xx < SizeX && yy < SizeY) m_heightmap[xx, yy] = ToCompressedHeight(val); } } } ClearTaint(); } } // TerrainData.GetCompressedMap public override int[] GetCompressedMap() { int[] newMap = new int[SizeX * SizeY]; int ind = 0; for (int xx = 0; xx < SizeX; xx++) for (int yy = 0; yy < SizeY; yy++) newMap[ind++] = m_heightmap[xx, yy]; return newMap; } // TerrainData.GetDatabaseBlob // The user wants something to store in the database.
public override bool GetDatabaseBlob(out int DBRevisionCode, out Array blob) { bool ret = false; if (SizeX == Constants.RegionSize && SizeY == Constants.RegionSize) { DBRevisionCode = (int)DBTerrainRevision.Legacy256; blob = ToLegacyTerrainSerialization(); ret = true; } else { DBRevisionCode = (int)DBTerrainRevision.Compressed2D; blob = ToCompressedTerrainSerialization(); ret = true; } return ret; } // TerrainData.GetDoubles public override double[,] GetDoubles() { double[,] ret = new double[SizeX, SizeY]; for (int xx = 0; xx < SizeX; xx++) for (int yy = 0; yy < SizeY; yy++) ret[xx, yy] = FromCompressedHeight(m_heightmap[xx, yy]); return ret; } // TerrainData.GetFloatsSerialized // This one dimensional version is ordered so height = map[y*sizeX+x]; // DEPRECATED: don't use this function as it does not retain the dimensions of the terrain // and the caller will probably do the wrong thing if the terrain is not the legacy 256x256. public override float[] GetFloatsSerialized() { int points = SizeX * SizeY; float[] heights = new float[points]; int idx = 0; for (int jj = 0; jj < SizeY; jj++) for (int ii = 0; ii < SizeX; ii++) { heights[idx++] = FromCompressedHeight(m_heightmap[ii, jj]); } return heights; } // Return 'true' if the patch that contains these region coordinates has been modified. // Note that checking the taint clears it. // There is existing code that relies on this feature. public override bool IsTaintedAt(int xx, int yy, bool clearOnTest) { int tx = xx / Constants.TerrainPatchSize; int ty = yy / Constants.TerrainPatchSize; bool ret = m_taint[tx, ty]; if (ret && clearOnTest) m_taint[tx, ty] = false; return ret; } // Old form that clears the taint flag when we check it. public override bool IsTaintedAt(int xx, int yy) { return IsTaintedAt(xx, yy, true /* clearOnTest */); } // TerrainData.TaintAllTerrain public override void TaintAllTerrain() { SetAllTaint(true); } // To save space (especially for large regions), keep the height as a short integer // that is coded as the float height times the compression factor (usually '100' // to make for two decimal places). public int ToCompressedHeight(double pHeight) { return (int)(pHeight * CompressionFactor); } // See the reader below. public Array ToCompressedTerrainSerialization() { Array ret = null; using (MemoryStream str = new MemoryStream((3 * sizeof(Int32)) + (SizeX * SizeY * sizeof(Int16)))) { using (BinaryWriter bw = new BinaryWriter(str)) { bw.Write((Int32)DBTerrainRevision.Compressed2D); bw.Write((Int32)SizeX); bw.Write((Int32)SizeY); bw.Write((Int32)CompressionFactor); for (int yy = 0; yy < SizeY; yy++) for (int xx = 0; xx < SizeX; xx++) { bw.Write((Int16)m_heightmap[xx, yy]); } } ret = str.ToArray(); } return ret; } // Just create an array of doubles. Presumes the caller implicitly knows the size.
public Array ToLegacyTerrainSerialization() { Array ret = null; using (MemoryStream str = new MemoryStream((int)Constants.RegionSize * (int)Constants.RegionSize * sizeof(double))) { using (BinaryWriter bw = new BinaryWriter(str)) { for (int xx = 0; xx < Constants.RegionSize; xx++) { for (int yy = 0; yy < Constants.RegionSize; yy++) { double height = this[xx, yy]; if (height == 0.0) height = double.Epsilon; bw.Write(height); } } } ret = str.ToArray(); } return ret; } private void SetAllTaint(bool setting) { for (int ii = 0; ii < m_taint.GetLength(0); ii++) for (int jj = 0; jj < m_taint.GetLength(1); jj++) m_taint[ii, jj] = setting; } } public abstract class TerrainData { // A height used when the user doesn't specify anything public const float DefaultTerrainHeight = 21f; public abstract float CompressionFactor { get; } // Terrain is always square public int SizeX { get; protected set; } public int SizeY { get; protected set; } public int SizeZ { get; protected set; } public abstract float this[int x, int y] { get; set; } // Someday terrain will have caves public abstract float this[int x, int y, int z] { get; set; } // Given a revision code and a blob from the database, create and return the right type of TerrainData. // The sizes passed are the expected size of the region. The database info will be used to // initialize the heightmap of that sized region with as much data as is in the blob. // Return created TerrainData or 'null' if unsuccessful. public static TerrainData CreateFromDatabaseBlobFactory(int pSizeX, int pSizeY, int pSizeZ, int pFormatCode, byte[] pBlob) { // For the moment, there is only one implementation class return new HeightmapTerrainData(pSizeX, pSizeY, pSizeZ, pFormatCode, pBlob); } public abstract void ClearLand(); public abstract void ClearLand(float height); public abstract void ClearTaint(); public abstract TerrainData Clone(); // return a special compressed representation of the heightmap in ints public abstract int[] GetCompressedMap(); // Return a representation of this terrain for storing as a blob in the database. // Returns 'true' to say the blob was stored in the 'out' locations. public abstract bool GetDatabaseBlob(out int DBFormatRevisionCode, out Array blob); public abstract double[,] GetDoubles(); public abstract float[] GetFloatsSerialized(); public abstract bool IsTaintedAt(int xx, int yy); public abstract bool IsTaintedAt(int xx, int yy, bool clearOnTest); public abstract void TaintAllTerrain(); } }
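// Illustrative sketch (not part of the OpenSim sources): storing a heightmap as a database blob
// and rebuilding it through the factory, which is the flow the comments above describe. The
// height value and indices are arbitrary; precision is limited by the x100 integer compression
// used by HeightmapTerrainData, and the revision code picked by GetDatabaseBlob depends on the
// region size.
using System;
using OpenSim.Framework;

internal static class TerrainBlobRoundTripSketch
{
    internal static void RoundTrip()
    {
        int size = (int)Constants.RegionSize;
        var heights = new double[size, size];
        heights[10, 20] = 21.5;

        TerrainData terrain = new HeightmapTerrainData(heights);

        // Legacy256 or Compressed2D is chosen automatically based on the region size.
        int revisionCode;
        Array blob;
        terrain.GetDatabaseBlob(out revisionCode, out blob);

        TerrainData restored = TerrainData.CreateFromDatabaseBlobFactory(
            size, size, (int)Constants.RegionHeight, revisionCode, (byte[])blob);

        Console.WriteLine("Restored height: {0}", restored[10, 20]);
    }
}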
#region Copyright // Copyright 2014 Myrcon Pty. Ltd. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #endregion using System; using NUnit.Framework; using Potato.Net.Shared.Test.Mocks; namespace Potato.Net.Shared.Test { [TestFixture] public class PacketQueueTest { /// <summary> /// Tests that a packet is returned immediately from telling the queue it has been sent, /// inferring the packet is not queued and should be sent immediately. /// </summary> [Test] public void TestPacketSendImmediate() { PacketQueue queue = new PacketQueue(); IPacketWrapper packet = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1 } }; IPacketWrapper poppedPacket = queue.PacketSend(packet); // Client would send to the server. Assert.AreEqual(packet, poppedPacket); Assert.AreEqual(1, queue.OutgoingPackets.Count); Assert.AreEqual(0, queue.QueuedPackets.Count); } /// <summary> /// Tests that a packet will not be sent to the server if another packet /// is currently sent to the server and awaiting a response. /// </summary> [Test] public void TestPacketSendQueued() { PacketQueue queue = new PacketQueue(); IPacketWrapper firstPacket = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1 } }; IPacketWrapper poppedPacket = queue.PacketSend(firstPacket); // Client would send to the server. Assert.AreEqual(firstPacket, poppedPacket); Assert.AreEqual(1, queue.OutgoingPackets.Count); IPacketWrapper secondPacket = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1 } }; poppedPacket = queue.PacketSend(secondPacket); // Popped packet is null, client would essentially ignore it until later. Assert.IsNull(poppedPacket); Assert.AreEqual(1, queue.OutgoingPackets.Count); Assert.AreEqual(1, queue.QueuedPackets.Count); } /// <summary> /// Tests that response to a request will remove from the outgoing list of packets. /// </summary> [Test] public void TestPacketReceivedRemoveFromOutgoing() { PacketQueue queue = new PacketQueue(); IPacketWrapper sentPacket = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1 } }; IPacketWrapper recievedPacket = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Response, RequestId = 1 } }; IPacketWrapper poppedPacket = queue.PacketSend(sentPacket); // Client would send to the server. Assert.AreEqual(sentPacket, poppedPacket); Assert.AreEqual(1, queue.OutgoingPackets.Count); Assert.AreEqual(0, queue.QueuedPackets.Count); poppedPacket = queue.PacketReceived(recievedPacket); Assert.IsNull(poppedPacket); Assert.AreEqual(0, queue.OutgoingPackets.Count); Assert.AreEqual(0, queue.QueuedPackets.Count); } /// <summary> /// Tests that a packet will be removed from the outgoing packets and a new packet is dequeued. 
/// </summary> [Test] public void TestPacketReceivedRemovedAndPopped() { PacketQueue queue = new PacketQueue(); IPacketWrapper firstPacketRequest = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1 } }; IPacketWrapper secondPacketRequest = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 2 } }; IPacketWrapper firstPacketResponse = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Response, RequestId = 1 } }; queue.PacketSend(firstPacketRequest); queue.PacketSend(secondPacketRequest); Assert.AreEqual(1, queue.OutgoingPackets.Count); Assert.AreEqual(1, queue.QueuedPackets.Count); IPacketWrapper poppedPacket = queue.PacketReceived(firstPacketResponse); Assert.AreEqual(secondPacketRequest, poppedPacket); queue.PacketSend(poppedPacket); Assert.AreEqual(1, queue.OutgoingPackets.Count); Assert.AreEqual(0, queue.QueuedPackets.Count); } /// <summary> /// Tests that a connection restart will be required if a packet has expired (2 minutes) /// </summary> [Test] public void TestRestartConnectionOnQueueFailureTruey() { PacketQueue queue = new PacketQueue(); IPacketWrapper packet = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1, Stamp = DateTime.Now.AddMinutes(-5) } }; IPacketWrapper poppedPacket = queue.PacketSend(packet); Assert.AreEqual(packet, poppedPacket); Assert.AreEqual(1, queue.OutgoingPackets.Count); Assert.IsTrue(queue.RestartConnectionOnQueueFailure()); } /// <summary> /// Tests that no restart of a connection will be required if the queue contains /// no old packets. /// </summary> [Test] public void TestRestartConnectionOnQueueFailureFalsey() { PacketQueue queue = new PacketQueue(); IPacketWrapper packet = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1 } }; IPacketWrapper poppedPacket = queue.PacketSend(packet); Assert.AreEqual(packet, poppedPacket); Assert.AreEqual(1, queue.OutgoingPackets.Count); Assert.IsFalse(queue.RestartConnectionOnQueueFailure()); } /// <summary> /// Tests that we can get the original request packet given a response packet that exists /// </summary> [Test] public void TestGetRequestPacketExists() { PacketQueue queue = new PacketQueue(); IPacketWrapper packetRequest = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1 } }; IPacketWrapper packetResponse = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Response, RequestId = 1 } }; queue.PacketSend(packetRequest); Assert.AreEqual(1, queue.OutgoingPackets.Count); IPacketWrapper fetchedRequestPacket = queue.GetRequestPacket(packetResponse); Assert.AreEqual(packetRequest, fetchedRequestPacket); } /// <summary> /// Tests that we get a null value back when no request matching the response /// has been sent. /// </summary> [Test] public void TestGetRequestPacketDoesNotExists() { PacketQueue queue = new PacketQueue(); IPacketWrapper packetRequest = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Request, RequestId = 1 } }; IPacketWrapper packetResponse = new MockPacket() { Packet = { Origin = PacketOrigin.Client, Type = PacketType.Response, RequestId = 2 } }; queue.PacketSend(packetRequest); Assert.AreEqual(1, queue.OutgoingPackets.Count); IPacketWrapper fetchedRequestPacket = queue.GetRequestPacket(packetResponse); Assert.IsNull(fetchedRequestPacket); } } }
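// Illustrative sketch (not part of the test fixture): how a client loop might drive PacketQueue,
// based on the behaviour exercised by the tests above. The Potato.Net.Shared namespace for
// PacketQueue/IPacketWrapper is assumed from the test's own namespaces, and TransmitToServer and
// Reconnect are hypothetical placeholders for the real transport; only the queue calls come from
// the API under test.
using Potato.Net.Shared;

internal class PacketQueueClientSketch
{
    private readonly PacketQueue queue = new PacketQueue();

    internal void Send(IPacketWrapper wrapper)
    {
        // A null result means another request is still awaiting its response,
        // so this packet stays queued until that response arrives.
        IPacketWrapper toSend = queue.PacketSend(wrapper);
        if (toSend != null) TransmitToServer(toSend);
    }

    internal void OnResponse(IPacketWrapper response)
    {
        // Completing a request may release the next queued packet for sending.
        IPacketWrapper next = queue.PacketReceived(response);
        if (next != null) Send(next);

        // An expired outgoing packet signals that the connection should be restarted.
        if (queue.RestartConnectionOnQueueFailure()) Reconnect();
    }

    private void TransmitToServer(IPacketWrapper wrapper) { /* hypothetical transport */ }
    private void Reconnect() { /* hypothetical reconnect */ }
}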
// ------------------------------------------------------------------------------ // <copyright from='2002' to='2002' company='Scott Hanselman'> // Copyright (c) Scott Hanselman. All Rights Reserved. // </copyright> // ------------------------------------------------------------------------------ // // Scott Hanselman's Tiny Academic Virtual CPU and OS // Copyright (c) 2002, Scott Hanselman ([email protected]) // All rights reserved. // // A BSD License // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // Neither the name of Scott Hanselman nor the names of its contributors // may be used to endorse or promote products derived from this software without // specific prior written permission. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS // BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE // OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // namespace Hanselman.CST352 { using System; using System.Collections; /// <summary> /// A collection that stores <see cref='Hanselman.CST352.Instruction'/> objects. /// </summary> /// <seealso cref='Hanselman.CST352.InstructionCollection'/> [Serializable()] public class InstructionCollection : CollectionBase { /// <summary> /// Initializes a new instance of <see cref='Hanselman.CST352.InstructionCollection'/>. /// </summary> public InstructionCollection() { } /// <summary> /// Initializes a new instance of <see cref='Hanselman.CST352.InstructionCollection'/> based on another <see cref='Hanselman.CST352.InstructionCollection'/>. /// </summary> /// <param name='value'> /// A <see cref='Hanselman.CST352.InstructionCollection'/> from which the contents are copied /// </param> public InstructionCollection(InstructionCollection value) { this.AddRange(value); } /// <summary> /// Initializes a new instance of <see cref='Hanselman.CST352.InstructionCollection'/> containing an array of <see cref='Hanselman.CST352.Instruction'/> objects. /// </summary> /// <param name='value'> /// An array of <see cref='Hanselman.CST352.Instruction'/> objects with which to initialize the collection /// </param> public InstructionCollection(Instruction[] value) { this.AddRange(value); } /// <summary> /// Represents the entry at the specified index of the <see cref='Hanselman.CST352.Instruction'/>. /// </summary> /// <param name='index'>The zero-based index of the entry to locate in the collection.</param> /// <value> /// The entry at the specified index of the collection.
/// </value> /// <exception cref='System.ArgumentOutOfRangeException'><paramref name='index'/> is outside the valid range of indexes for the collection.</exception> public Instruction this[int index] { get { return ((Instruction)(List[index])); } set { List[index] = value; } } /// <summary> /// Adds a <see cref='Hanselman.CST352.Instruction'/> with the specified value to the /// <see cref='Hanselman.CST352.InstructionCollection'/> . /// </summary> /// <param name='value'>The <see cref='Hanselman.CST352.Instruction'/> to add.</param> /// <returns> /// The index at which the new element was inserted. /// </returns> /// <seealso cref='Hanselman.CST352.InstructionCollection.AddRange(Instruction[])'/> public int Add(Instruction value) { return List.Add(value); } /// <summary> /// Copies the elements of an array to the end of the <see cref='Hanselman.CST352.InstructionCollection'/>. /// </summary> /// <param name='value'> /// An array of type <see cref='Hanselman.CST352.Instruction'/> containing the objects to add to the collection. /// </param> /// <returns> /// None. /// </returns> /// <seealso cref='Hanselman.CST352.InstructionCollection.Add'/> public void AddRange(Instruction[] value) { for (int i = 0; (i < value.Length); i = (i + 1)) { this.Add(value[i]); } } /// <summary> /// /// Adds the contents of another <see cref='Hanselman.CST352.InstructionCollection'/> to the end of the collection. /// /// </summary> /// <param name='value'> /// A <see cref='Hanselman.CST352.InstructionCollection'/> containing the objects to add to the collection. /// </param> /// <returns> /// None. /// </returns> /// <seealso cref='Hanselman.CST352.InstructionCollection.Add'/> public void AddRange(InstructionCollection value) { for (int i = 0; (i < value.Count); i = (i + 1)) { this.Add(value[i]); } } /// <summary> /// Gets a value indicating whether the /// <see cref='Hanselman.CST352.InstructionCollection'/> contains the specified <see cref='Hanselman.CST352.Instruction'/>. /// </summary> /// <param name='value'>The <see cref='Hanselman.CST352.Instruction'/> to locate.</param> /// <returns> /// <see langword='true'/> if the <see cref='Hanselman.CST352.Instruction'/> is contained in the collection; /// otherwise, <see langword='false'/>. /// </returns> /// <seealso cref='Hanselman.CST352.InstructionCollection.IndexOf'/> public bool Contains(Instruction value) { return List.Contains(value); } /// <summary> /// Copies the <see cref='Hanselman.CST352.InstructionCollection'/> values to a one-dimensional <see cref='System.Array'/> instance at the /// specified index. /// </summary> /// <param name='array'>The one-dimensional <see cref='System.Array'/> that is the destination of the values copied from <see cref='Hanselman.CST352.InstructionCollection'/> .</param> /// <param name='index'>The index in <paramref name='array'/> where copying begins.</param> /// <returns> /// None. /// </returns> /// <exception cref='System.ArgumentException'><paramref name='array'/> is multidimensional. -or- The number of elements in the <see cref='Hanselman.CST352.InstructionCollection'/> is greater than the available space between <paramref name='index'/> and the end of <paramref name='array'/>.</exception> /// <exception cref='System.ArgumentNullException'><paramref name='array'/> is <see langword='null'/>. </exception> /// <exception cref='System.ArgumentOutOfRangeException'><paramref name='index'/> is less than <paramref name='array'/>'s lowbound. 
</exception> /// <seealso cref='System.Array'/> public void CopyTo(Instruction[] array, int index) { List.CopyTo(array, index); } /// <summary> /// Returns the index of a <see cref='Hanselman.CST352.Instruction'/> in /// the <see cref='Hanselman.CST352.InstructionCollection'/> . /// </summary> /// <param name='value'>The <see cref='Hanselman.CST352.Instruction'/> to locate.</param> /// <returns> /// The index of the <see cref='Hanselman.CST352.Instruction'/> of <paramref name='value'/> in the /// <see cref='Hanselman.CST352.InstructionCollection'/>, if found; otherwise, -1. /// </returns> /// <seealso cref='Hanselman.CST352.InstructionCollection.Contains'/> public int IndexOf(Instruction value) { return List.IndexOf(value); } /// <summary> /// Inserts a <see cref='Hanselman.CST352.Instruction'/> into the <see cref='Hanselman.CST352.InstructionCollection'/> at the specified index. /// </summary> /// <param name='index'>The zero-based index where <paramref name='value'/> should be inserted.</param> /// <param name=' value'>The <see cref='Hanselman.CST352.Instruction'/> to insert.</param> /// <returns>None.</returns> /// <seealso cref='Hanselman.CST352.InstructionCollection.Add'/> public void Insert(int index, Instruction value) { List.Insert(index, value); } /// <summary> /// Returns an enumerator that can iterate through /// the <see cref='Hanselman.CST352.InstructionCollection'/> . /// </summary> /// <returns>None.</returns> /// <seealso cref='System.Collections.IEnumerator'/> public new InstructionEnumerator GetEnumerator() { return new InstructionEnumerator(this); } /// <summary> /// Removes a specific <see cref='Hanselman.CST352.Instruction'/> from the /// <see cref='Hanselman.CST352.InstructionCollection'/> . /// </summary> /// <param name='value'>The <see cref='Hanselman.CST352.Instruction'/> to remove from the <see cref='Hanselman.CST352.InstructionCollection'/> .</param> /// <returns>None.</returns> /// <exception cref='System.ArgumentException'><paramref name='value'/> is not found in the Collection. </exception> public void Remove(Instruction value) { List.Remove(value); } /// <summary> /// Provided for "foreach" support with this collection /// </summary> public class InstructionEnumerator : object, IEnumerator { private IEnumerator baseEnumerator; private IEnumerable temp; /// <summary> /// Public constructor for an InstructionEnumerator /// </summary> /// <param name="mappings">The <see cref="InstructionCollection"/>we are going to iterate over</param> public InstructionEnumerator(InstructionCollection mappings) { this.temp = ((IEnumerable)(mappings)); this.baseEnumerator = temp.GetEnumerator(); } /// <summary> /// The current <see cref="Instruction"/> /// </summary> public Instruction Current { get { return ((Instruction)(baseEnumerator.Current)); } } /// <summary> /// The current IEnumerator interface /// </summary> object IEnumerator.Current { get { return baseEnumerator.Current; } } /// <summary> /// Move to the next Instruction /// </summary> /// <returns>true or false based on success</returns> public bool MoveNext() { return baseEnumerator.MoveNext(); } /// <summary> /// Move to the next Instruction /// </summary> /// <returns>true or false based on success</returns> bool IEnumerator.MoveNext() { return baseEnumerator.MoveNext(); } /// <summary> /// Reset the cursor /// </summary> public void Reset() { baseEnumerator.Reset(); } /// <summary> /// Reset the cursor /// </summary> void IEnumerator.Reset() { baseEnumerator.Reset(); } } } }
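// Illustrative sketch (not part of the OS sources): typical use of the strongly typed collection
// declared above. How Instruction instances are created is out of scope here, so they are taken
// as parameters rather than constructed.
using Hanselman.CST352;

internal static class InstructionCollectionUsageSketch
{
    internal static InstructionCollection Combine(Instruction[] first, InstructionCollection second)
    {
        InstructionCollection program = new InstructionCollection(first); // copy-construct from an array
        program.AddRange(second);                                         // append another collection

        // The nested InstructionEnumerator exposes a typed Current, so foreach needs no cast.
        foreach (Instruction instruction in program)
        {
            // ...inspect, schedule, or execute each instruction here...
        }

        return program;
    }
}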
// // Copyright 2011-2013, Xamarin Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // using System; using System.Threading.Tasks; using System.Threading; #if __UNIFIED__ using CoreLocation; using Foundation; using UIKit; #else using MonoTouch.CoreLocation; using MonoTouch.Foundation; using MonoTouch.UIKit; #endif using Plugin.Geolocator.Abstractions; namespace Plugin.Geolocator { /// <summary> /// Implementation for Geolocator /// </summary> public class GeolocatorImplementation : IGeolocator { bool deferringUpdates; public GeolocatorImplementation() { DesiredAccuracy = 100; manager = GetManager(); manager.AuthorizationChanged += OnAuthorizationChanged; manager.Failed += OnFailed; if (UIDevice.CurrentDevice.CheckSystemVersion(6, 0)) manager.LocationsUpdated += OnLocationsUpdated; else manager.UpdatedLocation += OnUpdatedLocation; manager.UpdatedHeading += OnUpdatedHeading; manager.DeferredUpdatesFinished += OnDeferredUpdatedFinished; RequestAuthorization(); } void OnDeferredUpdatedFinished (object sender, NSErrorEventArgs e) { deferringUpdates = false; } void RequestAuthorization() { var info = NSBundle.MainBundle.InfoDictionary; if (UIDevice.CurrentDevice.CheckSystemVersion(8, 0)) { if (info.ContainsKey(new NSString("NSLocationWhenInUseUsageDescription"))) manager.RequestWhenInUseAuthorization(); else if (info.ContainsKey(new NSString("NSLocationAlwaysUsageDescription"))) manager.RequestAlwaysAuthorization(); else throw new UnauthorizedAccessException("On iOS 8.0 and higher you must set either NSLocationWhenInUseUsageDescription or NSLocationAlwaysUsageDescription in your Info.plist file to enable Authorization Requests for Location updates!"); } } /// <inheritdoc/> public event EventHandler<PositionErrorEventArgs> PositionError; /// <inheritdoc/> public event EventHandler<PositionEventArgs> PositionChanged; /// <inheritdoc/> public double DesiredAccuracy { get; set; } /// <inheritdoc/> public bool IsListening { get { return isListening; } } /// <inheritdoc/> public bool SupportsHeading { get { return CLLocationManager.HeadingAvailable; } } ListenerSettings listenerSettings; /// <inheritdoc/> public bool IsGeolocationAvailable { get { return true; } // all iOS devices support at least wifi geolocation } /// <inheritdoc/> public bool IsGeolocationEnabled { get { var status = CLLocationManager.Status; if (UIDevice.CurrentDevice.CheckSystemVersion(8, 0)) { return status == CLAuthorizationStatus.AuthorizedAlways || status == CLAuthorizationStatus.AuthorizedWhenInUse; } return status == CLAuthorizationStatus.Authorized; } } /// <inheritdoc/> public Task<Position> GetPositionAsync(int timeoutMilliseconds = Timeout.Infinite, CancellationToken? 
cancelToken = null, bool includeHeading = false) { if (timeoutMilliseconds <= 0 && timeoutMilliseconds != Timeout.Infinite) throw new ArgumentOutOfRangeException("timeoutMilliseconds", "Timeout must be positive or Timeout.Infinite"); if (!cancelToken.HasValue) cancelToken = CancellationToken.None; TaskCompletionSource<Position> tcs; if (!IsListening) { var m = GetManager(); // permit background updates if background location mode is enabled if (UIDevice.CurrentDevice.CheckSystemVersion(9, 0)) { NSArray backgroundModes = NSBundle.MainBundle.InfoDictionary[(NSString)"UIBackgroundModes"] as NSArray; m.AllowsBackgroundLocationUpdates = backgroundModes.Contains((NSString)"Location") || backgroundModes.Contains((NSString)"location"); } // always prevent location update pausing since we're only listening for a single update. if (UIDevice.CurrentDevice.CheckSystemVersion(6, 0)) m.PausesLocationUpdatesAutomatically = false; tcs = new TaskCompletionSource<Position>(m); var singleListener = new GeolocationSingleUpdateDelegate(m, DesiredAccuracy, includeHeading, timeoutMilliseconds, cancelToken.Value); m.Delegate = singleListener; m.StartUpdatingLocation(); if (includeHeading && SupportsHeading) m.StartUpdatingHeading(); return singleListener.Task; } tcs = new TaskCompletionSource<Position>(); if (position == null) { EventHandler<PositionErrorEventArgs> gotError = null; gotError = (s, e) => { tcs.TrySetException(new GeolocationException(e.Error)); PositionError -= gotError; }; PositionError += gotError; EventHandler<PositionEventArgs> gotPosition = null; gotPosition = (s, e) => { tcs.TrySetResult(e.Position); PositionChanged -= gotPosition; }; PositionChanged += gotPosition; } else tcs.SetResult(position); return tcs.Task; } bool CanDeferLocationUpdate { get { return UIDevice.CurrentDevice.CheckSystemVersion(6, 0); } } /// <inheritdoc/> public Task<bool> StartListeningAsync(int minTime, double minDistance, bool includeHeading = false, ListenerSettings settings = null) { if (minTime < 0) throw new ArgumentOutOfRangeException("minTime"); if (minDistance < 0) throw new ArgumentOutOfRangeException("minDistance"); if (isListening) throw new InvalidOperationException("Already listening"); // if no settings were passed in, instantiate the default settings. need to check this and create default settings since // previous calls to StartListeningAsync might have already configured the location manager in a non-default way that the // caller of this method might not be expecting. the caller should expect the defaults if they pass no settings. 
if (settings == null) settings = new ListenerSettings(); // keep reference to settings so that we can stop the listener appropriately later listenerSettings = settings; double desiredAccuracy = DesiredAccuracy; #region apply settings to location manager // set background flag if (UIDevice.CurrentDevice.CheckSystemVersion(9, 0)) manager.AllowsBackgroundLocationUpdates = settings.AllowBackgroundUpdates; // configure location update pausing if (UIDevice.CurrentDevice.CheckSystemVersion(6, 0)) { manager.PausesLocationUpdatesAutomatically = settings.PauseLocationUpdatesAutomatically; if (settings.ActivityType == ActivityType.AutomotiveNavigation) manager.ActivityType = CLActivityType.AutomotiveNavigation; else if (settings.ActivityType == ActivityType.Fitness) manager.ActivityType = CLActivityType.Fitness; else if (settings.ActivityType == ActivityType.Other) manager.ActivityType = CLActivityType.Other; else if (settings.ActivityType == ActivityType.OtherNavigation) manager.ActivityType = CLActivityType.OtherNavigation; } // to use deferral, CLLocationManager.DistanceFilter must be set to CLLocationDistance.None, and CLLocationManager.DesiredAccuracy must be // either CLLocation.AccuracyBest or CLLocation.AccuracyBestForNavigation. deferral only available on iOS 6.0 and above. if (CanDeferLocationUpdate && settings.DeferLocationUpdates) { minDistance = CLLocationDistance.FilterNone; desiredAccuracy = CLLocation.AccuracyBest; } #endregion isListening = true; manager.DesiredAccuracy = desiredAccuracy; manager.DistanceFilter = minDistance; if (settings?.ListenForSignificantChanges ?? false) manager.StartMonitoringSignificantLocationChanges(); else manager.StartUpdatingLocation(); if (includeHeading && CLLocationManager.HeadingAvailable) manager.StartUpdatingHeading(); return Task.FromResult(true); } /// <inheritdoc/> public Task<bool> StopListeningAsync() { if (!isListening) return Task.FromResult(true); isListening = false; if (CLLocationManager.HeadingAvailable) manager.StopUpdatingHeading(); // it looks like deferred location updates can apply to the standard service or significant change service. disallow deferral in either case. if ((listenerSettings?.DeferLocationUpdates ?? false) && CanDeferLocationUpdate) manager.DisallowDeferredLocationUpdates(); if (listenerSettings?.ListenForSignificantChanges ?? false) manager.StopMonitoringSignificantLocationChanges(); else manager.StopUpdatingLocation(); listenerSettings = null; position = null; return Task.FromResult(true); } readonly CLLocationManager manager; bool isListening; Position position; CLLocationManager GetManager() { CLLocationManager m = null; new NSObject().InvokeOnMainThread(() => m = new CLLocationManager()); return m; } void OnUpdatedHeading(object sender, CLHeadingUpdatedEventArgs e) { if (e.NewHeading.TrueHeading == -1) return; var p = (position == null) ? new Position() : new Position(this.position); p.Heading = e.NewHeading.TrueHeading; this.position = p; OnPositionChanged(new PositionEventArgs(p)); } void OnLocationsUpdated(object sender, CLLocationsUpdatedEventArgs e) { foreach (CLLocation location in e.Locations) UpdatePosition(location); // defer future location updates if requested if ((listenerSettings?.DeferLocationUpdates ?? false) && !deferringUpdates && CanDeferLocationUpdate) { manager.AllowDeferredLocationUpdatesUntil(listenerSettings.DeferralDistanceMeters == null ? CLLocationDistance.MaxDistance : listenerSettings.DeferralDistanceMeters.GetValueOrDefault(), listenerSettings.DeferralTime == null ? 
CLLocationManager.MaxTimeInterval : listenerSettings.DeferralTime.GetValueOrDefault().TotalSeconds); deferringUpdates = true; } } void OnUpdatedLocation(object sender, CLLocationUpdatedEventArgs e) { UpdatePosition(e.NewLocation); } void UpdatePosition(CLLocation location) { var p = (position == null) ? new Position() : new Position(this.position); if (location.HorizontalAccuracy > -1) { p.Accuracy = location.HorizontalAccuracy; p.Latitude = location.Coordinate.Latitude; p.Longitude = location.Coordinate.Longitude; } if (location.VerticalAccuracy > -1) { p.Altitude = location.Altitude; p.AltitudeAccuracy = location.VerticalAccuracy; } if (location.Speed > -1) p.Speed = location.Speed; var dateTime = (DateTime) location.Timestamp; p.Timestamp = new DateTimeOffset(dateTime); position = p; OnPositionChanged(new PositionEventArgs(p)); location.Dispose(); } void OnFailed(object sender, NSErrorEventArgs e) { if ((CLError)(int)e.Error.Code == CLError.Network) OnPositionError(new PositionErrorEventArgs(GeolocationError.PositionUnavailable)); } void OnAuthorizationChanged(object sender, CLAuthorizationChangedEventArgs e) { if (e.Status == CLAuthorizationStatus.Denied || e.Status == CLAuthorizationStatus.Restricted) OnPositionError(new PositionErrorEventArgs(GeolocationError.Unauthorized)); } void OnPositionChanged(PositionEventArgs e) => PositionChanged?.Invoke(this, e); async void OnPositionError(PositionErrorEventArgs e) { await StopListeningAsync(); PositionError?.Invoke(this, e); } } }
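// Illustrative sketch (not part of the plugin source): one-shot and continuous use of the
// iOS GeolocatorImplementation defined above. Assumes the app's Info.plist declares
// NSLocationWhenInUseUsageDescription, as required by RequestAuthorization() on iOS 8+.
namespace Plugin.Geolocator.Examples
{
    using System;
    using System.Threading.Tasks;
    using Plugin.Geolocator.Abstractions;

    public static class GeolocatorUsage
    {
        public static async Task<Position> GetSingleFixAsync()
        {
            var locator = new GeolocatorImplementation { DesiredAccuracy = 50 };

            // One-shot request: give up after 10 seconds, no heading needed.
            return await locator.GetPositionAsync(timeoutMilliseconds: 10000);
        }

        public static Task<bool> StartTrackingAsync(GeolocatorImplementation locator)
        {
            locator.PositionChanged += (sender, args) =>
                Console.WriteLine($"Lat={args.Position.Latitude}, Lon={args.Position.Longitude}");

            locator.PositionError += (sender, args) =>
                Console.WriteLine($"Location error: {args.Error}");

            // minTime is only range-checked by this iOS implementation;
            // minDistance maps to CLLocationManager.DistanceFilter.
            return locator.StartListeningAsync(minTime: 1000, minDistance: 10,
                includeHeading: false,
                settings: new ListenerSettings { PauseLocationUpdatesAutomatically = false });
        }
    }
}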
#pragma warning disable 109, 114, 219, 429, 168, 162 namespace pony.unity3d.scene.ucore { public class RotorUCore : global::UnityEngine.MonoBehaviour, global::haxe.lang.IHxObject { public RotorUCore(global::haxe.lang.EmptyObject empty) : base() { unchecked { } #line default } public RotorUCore() : base() { unchecked { #line 43 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.speed = ((float) (200) ); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.withTimeScale = true; #line 48 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.direct = new global::UnityEngine.Vector3(((float) (1) ), ((float) (0) ), ((float) (0) )); } #line default } public static object __hx_createEmpty() { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return new global::pony.unity3d.scene.ucore.RotorUCore(((global::haxe.lang.EmptyObject) (global::haxe.lang.EmptyObject.EMPTY) )); } #line default } public static object __hx_create(global::Array arr) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return new global::pony.unity3d.scene.ucore.RotorUCore(); } #line default } public bool withTimeScale; public float speed; public global::UnityEngine.Vector3 direct; public virtual void Update() { unchecked { #line 52 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" float sp = default(float); #line 52 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" if (this.withTimeScale) { #line 52 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" sp = global::UnityEngine.Time.deltaTime; } else { #line 52 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" sp = global::UnityEngine.Time.fixedDeltaTime; } sp *= this.speed; this.transform.Rotate(((global::UnityEngine.Vector3) (this.direct) ), ((float) (sp) )); } #line default } public virtual bool __hx_deleteField(string field, int hash) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return false; } #line default } public virtual object __hx_lookupField(string field, int hash, bool throwErrors, bool isCheck) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" if (isCheck) { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return global::haxe.lang.Runtime.undefined; } else { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" if (throwErrors) { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" throw global::haxe.lang.HaxeException.wrap("Field not found."); } else { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return default(object); } } } #line default } public virtual double __hx_lookupField_f(string field, int hash, bool throwErrors) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" if (throwErrors) { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" throw global::haxe.lang.HaxeException.wrap("Field not found or incompatible field type."); } else { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return default(double); } } #line default } public virtual object __hx_lookupSetField(string field, int 
hash, object @value) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" throw global::haxe.lang.HaxeException.wrap("Cannot access field for writing."); } #line default } public virtual double __hx_lookupSetField_f(string field, int hash, double @value) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" throw global::haxe.lang.HaxeException.wrap("Cannot access field for writing or incompatible type."); } #line default } public virtual double __hx_setField_f(string field, int hash, double @value, bool handleProperties) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" switch (hash) { case 23697287: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.speed = ((float) (@value) ); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } default: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.__hx_lookupSetField_f(field, hash, @value); } } } #line default } public virtual object __hx_setField(string field, int hash, object @value, bool handleProperties) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" switch (hash) { case 1575675685: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.hideFlags = ((global::UnityEngine.HideFlags) (@value) ); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } case 1224700491: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.name = global::haxe.lang.Runtime.toString(@value); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } case 5790298: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.tag = global::haxe.lang.Runtime.toString(@value); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } case 373703110: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.active = ((bool) (@value) ); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } case 2117141633: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.enabled = ((bool) (@value) ); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } case 896046654: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.useGUILayout = ((bool) (@value) ); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } case 1013599017: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.direct = ((global::UnityEngine.Vector3) (@value) ); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } case 23697287: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.speed = ((float) (global::haxe.lang.Runtime.toInt(@value)) ); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } case 916069463: { #line 40 
"C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.withTimeScale = ((bool) (@value) ); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return @value; } default: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.__hx_lookupSetField(field, hash, @value); } } } #line default } public virtual object __hx_getField(string field, int hash, bool throwErrors, bool isCheck, bool handleProperties) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" switch (hash) { case 1826409040: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("GetType"), ((int) (1826409040) ))) ); } case 304123084: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("ToString"), ((int) (304123084) ))) ); } case 276486854: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("GetInstanceID"), ((int) (276486854) ))) ); } case 295397041: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("GetHashCode"), ((int) (295397041) ))) ); } case 1955029599: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("Equals"), ((int) (1955029599) ))) ); } case 1575675685: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.hideFlags; } case 1224700491: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.name; } case 294420221: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("SendMessageUpwards"), ((int) (294420221) ))) ); } case 139469119: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("SendMessage"), ((int) (139469119) ))) ); } case 967979664: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("GetComponentsInChildren"), ((int) (967979664) ))) ); } case 2122408236: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("GetComponents"), ((int) (2122408236) ))) ); } case 1328964235: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), 
global::haxe.lang.Runtime.toString("GetComponentInChildren"), ((int) (1328964235) ))) ); } case 1723652455: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("GetComponent"), ((int) (1723652455) ))) ); } case 89600725: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("CompareTag"), ((int) (89600725) ))) ); } case 2134927590: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("BroadcastMessage"), ((int) (2134927590) ))) ); } case 5790298: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.tag; } case 373703110: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.active; } case 1471506513: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.gameObject; } case 1751728597: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.particleSystem; } case 524620744: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.particleEmitter; } case 964013983: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.hingeJoint; } case 1238753076: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.collider; } case 674101152: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.guiTexture; } case 262266241: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.guiElement; } case 1515196979: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.networkView; } case 801759432: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.guiText; } case 662730966: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.audio; } case 853263683: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.renderer; } case 1431885287: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.constantForce; } case 1261760260: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.animation; } case 1962709206: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.light; } case 931940005: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.camera; } case 1895479501: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.rigidbody; } case 1167273324: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.transform; } case 2117141633: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.enabled; } case 2084823382: { #line 40 
"C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("StopCoroutine"), ((int) (2084823382) ))) ); } case 1856815770: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("StopAllCoroutines"), ((int) (1856815770) ))) ); } case 832859768: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("StartCoroutine_Auto"), ((int) (832859768) ))) ); } case 987108662: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("StartCoroutine"), ((int) (987108662) ))) ); } case 602588383: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("IsInvoking"), ((int) (602588383) ))) ); } case 1641152943: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("InvokeRepeating"), ((int) (1641152943) ))) ); } case 1416948632: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("Invoke"), ((int) (1416948632) ))) ); } case 757431474: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("CancelInvoke"), ((int) (757431474) ))) ); } case 896046654: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.useGUILayout; } case 999946793: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("Update"), ((int) (999946793) ))) ); } case 1013599017: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.direct; } case 23697287: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.speed; } case 916069463: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.withTimeScale; } default: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.__hx_lookupField(field, hash, throwErrors, isCheck); } } } #line default } public virtual double __hx_getField_f(string field, int hash, bool throwErrors, bool handleProperties) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" switch (hash) { case 23697287: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((double) (this.speed) ); } default: { #line 40 
"C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return this.__hx_lookupField_f(field, hash, throwErrors); } } } #line default } public virtual object __hx_invokeField(string field, int hash, global::Array dynargs) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" switch (hash) { case 757431474:case 1416948632:case 1641152943:case 602588383:case 987108662:case 832859768:case 1856815770:case 2084823382:case 2134927590:case 89600725:case 1723652455:case 1328964235:case 2122408236:case 967979664:case 139469119:case 294420221:case 1955029599:case 295397041:case 276486854:case 304123084:case 1826409040: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return global::haxe.lang.Runtime.slowCallField(this, field, dynargs); } case 999946793: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" this.Update(); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" break; } default: { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return ((global::haxe.lang.Function) (this.__hx_getField(field, hash, true, false, false)) ).__hx_invokeDynamic(dynargs); } } #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" return default(object); } #line default } public virtual void __hx_getFields(global::Array<object> baseArr) { unchecked { #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("hideFlags"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("name"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("tag"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("active"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("gameObject"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("particleSystem"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("particleEmitter"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("hingeJoint"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("collider"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("guiTexture"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("guiElement"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("networkView"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("guiText"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("audio"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("renderer"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("constantForce"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("animation"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("light"); #line 40 
"C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("camera"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("rigidbody"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("transform"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("enabled"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("useGUILayout"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("direct"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("speed"); #line 40 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/scene/ucore/RotorUCore.hx" baseArr.push("withTimeScale"); } #line default } } }
/* Copyright (c) 2012-2016 The ANTLR Project. All rights reserved. * Use of this file is governed by the BSD 3-clause license that * can be found in the LICENSE.txt file in the project root. */ using System; using System.Text; using Antlr4.Runtime; using Antlr4.Runtime.Misc; using Antlr4.Runtime.Sharpen; namespace Antlr4.Runtime { public class UnbufferedTokenStream : ITokenStream { private ITokenSource _tokenSource; /// <summary>A moving window buffer of the data being scanned.</summary> /// <remarks> /// A moving window buffer of the data being scanned. While there's a marker, /// we keep adding to buffer. Otherwise, /// <see cref="Consume()">consume()</see> /// resets so /// we start filling at index 0 again. /// </remarks> protected internal IToken[] tokens; /// <summary> /// The number of tokens currently in /// <see cref="tokens">tokens</see> /// . /// <p>This is not the buffer capacity, that's /// <c>tokens.length</c> /// .</p> /// </summary> protected internal int n; /// <summary> /// 0..n-1 index into /// <see cref="tokens">tokens</see> /// of next token. /// <p>The /// <c>LT(1)</c> /// token is /// <c>tokens[p]</c> /// . If /// <c>p == n</c> /// , we are /// out of buffered tokens.</p> /// </summary> protected internal int p = 0; /// <summary> /// Count up with /// <see cref="Mark()">mark()</see> /// and down with /// <see cref="Release(int)">release()</see> /// . When we /// <c>release()</c> /// the last mark, /// <c>numMarkers</c> /// reaches 0 and we reset the buffer. Copy /// <c>tokens[p]..tokens[n-1]</c> /// to /// <c>tokens[0]..tokens[(n-1)-p]</c> /// . /// </summary> protected internal int numMarkers = 0; /// <summary> /// This is the /// <c>LT(-1)</c> /// token for the current position. /// </summary> protected internal IToken lastToken; /// <summary> /// When /// <c>numMarkers &gt; 0</c> /// , this is the /// <c>LT(-1)</c> /// token for the /// first token in /// <see cref="tokens"/> /// . Otherwise, this is /// <see langword="null"/> /// . /// </summary> protected internal IToken lastTokenBufferStart; /// <summary>Absolute token index.</summary> /// <remarks> /// Absolute token index. It's the index of the token about to be read via /// <c>LT(1)</c> /// . Goes from 0 to the number of tokens in the entire stream, /// although the stream size is unknown before the end is reached. /// <p>This value is used to set the token indexes if the stream provides tokens /// that implement /// <see cref="IWritableToken"/> /// .</p> /// </remarks> protected internal int currentTokenIndex = 0; public UnbufferedTokenStream(ITokenSource tokenSource) : this(tokenSource, 256) { } public UnbufferedTokenStream(ITokenSource tokenSource, int bufferSize) { this.TokenSource = tokenSource; this.tokens = new IToken[bufferSize]; n = 0; Fill(1); } // prime the pump public virtual IToken Get(int i) { int bufferStartIndex = GetBufferStartIndex(); if (i < bufferStartIndex || i >= bufferStartIndex + n) { throw new ArgumentOutOfRangeException("get(" + i + ") outside buffer: " + bufferStartIndex + ".." 
+ (bufferStartIndex + n)); } return tokens[i - bufferStartIndex]; } public virtual IToken LT(int i) { if (i == -1) { return lastToken; } Sync(i); int index = p + i - 1; if (index < 0) { throw new ArgumentOutOfRangeException("LT(" + i + ") gives negative index"); } if (index >= n) { System.Diagnostics.Debug.Assert(n > 0 && tokens[n - 1].Type == TokenConstants.EOF); return tokens[n - 1]; } return tokens[index]; } public virtual int LA(int i) { return LT(i).Type; } public virtual ITokenSource TokenSource { get { return _tokenSource; } set { _tokenSource = value; } } [return: NotNull] public virtual string GetText() { return string.Empty; } [return: NotNull] public virtual string GetText(RuleContext ctx) { return GetText(ctx.SourceInterval); } [return: NotNull] public virtual string GetText(IToken start, IToken stop) { if (start != null && stop != null) { return GetText(Interval.Of(start.TokenIndex, stop.TokenIndex)); } throw new NotSupportedException("The specified start and stop symbols are not supported."); } public virtual void Consume() { if (LA(1) == TokenConstants.EOF) { throw new InvalidOperationException("cannot consume EOF"); } // buf always has at least tokens[p==0] in this method due to ctor lastToken = tokens[p]; // track last token for LT(-1) // if we're at last token and no markers, opportunity to flush buffer if (p == n - 1 && numMarkers == 0) { n = 0; p = -1; // p++ will leave this at 0 lastTokenBufferStart = lastToken; } p++; currentTokenIndex++; Sync(1); } /// <summary> /// Make sure we have 'need' elements from current position /// <see cref="p">p</see> /// . Last valid /// <c>p</c> /// index is /// <c>tokens.length-1</c> /// . /// <c>p+need-1</c> /// is the tokens index 'need' elements /// ahead. If we need 1 element, /// <c>(p+1-1)==p</c> /// must be less than /// <c>tokens.length</c> /// . /// </summary> protected internal virtual void Sync(int want) { int need = (p + want - 1) - n + 1; // how many more elements we need? if (need > 0) { Fill(need); } } /// <summary> /// Add /// <paramref name="n"/> /// elements to the buffer. Returns the number of tokens /// actually added to the buffer. If the return value is less than /// <paramref name="n"/> /// , /// then EOF was reached before /// <paramref name="n"/> /// tokens could be added. /// </summary> protected internal virtual int Fill(int n) { for (int i = 0; i < n; i++) { if (this.n > 0 && tokens[this.n - 1].Type == TokenConstants.EOF) { return i; } IToken t = TokenSource.NextToken(); Add(t); } return n; } protected internal virtual void Add(IToken t) { if (n >= tokens.Length) { tokens = Arrays.CopyOf(tokens, tokens.Length * 2); } if (t is IWritableToken) { ((IWritableToken)t).TokenIndex = GetBufferStartIndex() + n; } tokens[n++] = t; } /// <summary>Return a marker that we can release later.</summary> /// <remarks> /// Return a marker that we can release later. /// <p>The specific marker value used for this class allows for some level of /// protection against misuse where /// <c>seek()</c> /// is called on a mark or /// <c>release()</c> /// is called in the wrong order.</p> /// </remarks> public virtual int Mark() { if (numMarkers == 0) { lastTokenBufferStart = lastToken; } int mark = -numMarkers - 1; numMarkers++; return mark; } public virtual void Release(int marker) { int expectedMark = -numMarkers; if (marker != expectedMark) { throw new InvalidOperationException("release() called with an invalid marker."); } numMarkers--; if (numMarkers == 0) { // can we release buffer? 
if (p > 0) { // Copy tokens[p]..tokens[n-1] to tokens[0]..tokens[(n-1)-p], reset ptrs // p is last valid token; move nothing if p==n as we have no valid char System.Array.Copy(tokens, p, tokens, 0, n - p); // shift n-p tokens from p to 0 n = n - p; p = 0; } lastTokenBufferStart = lastToken; } } public virtual int Index { get { return currentTokenIndex; } } public virtual void Seek(int index) { // seek to absolute index if (index == currentTokenIndex) { return; } if (index > currentTokenIndex) { Sync(index - currentTokenIndex); index = Math.Min(index, GetBufferStartIndex() + n - 1); } int bufferStartIndex = GetBufferStartIndex(); int i = index - bufferStartIndex; if (i < 0) { throw new ArgumentException("cannot seek to negative index " + index); } else { if (i >= n) { throw new NotSupportedException("seek to index outside buffer: " + index + " not in " + bufferStartIndex + ".." + (bufferStartIndex + n)); } } p = i; currentTokenIndex = index; if (p == 0) { lastToken = lastTokenBufferStart; } else { lastToken = tokens[p - 1]; } } public virtual int Size { get { throw new NotSupportedException("Unbuffered stream cannot know its size"); } } public virtual string SourceName { get { return TokenSource.SourceName; } } [return: NotNull] public virtual string GetText(Interval interval) { int bufferStartIndex = GetBufferStartIndex(); int bufferStopIndex = bufferStartIndex + tokens.Length - 1; int start = interval.a; int stop = interval.b; if (start < bufferStartIndex || stop > bufferStopIndex) { throw new NotSupportedException("interval " + interval + " not in token buffer window: " + bufferStartIndex + ".." + bufferStopIndex); } int a = start - bufferStartIndex; int b = stop - bufferStartIndex; StringBuilder buf = new StringBuilder(); for (int i = a; i <= b; i++) { IToken t = tokens[i]; buf.Append(t.Text); } return buf.ToString(); } protected internal int GetBufferStartIndex() { return currentTokenIndex - p; } } }
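// Illustrative sketch (not part of the runtime source): drives UnbufferedTokenStream directly.
// Any ANTLR-generated lexer can be passed in, since a lexer implements ITokenSource.
namespace Antlr4.Runtime.Examples
{
    public static class UnbufferedTokenStreamUsage
    {
        public static int CountTokens(ITokenSource lexer)
        {
            // Small buffer on purpose: while a mark is held, the moving window keeps growing;
            // once the last mark is released, the buffer is allowed to reset.
            var tokens = new UnbufferedTokenStream(lexer, bufferSize: 16);

            int count = 0;
            int marker = tokens.Mark();          // pin the buffer while we scan
            try
            {
                while (tokens.LA(1) != TokenConstants.EOF)
                {
                    IToken current = tokens.LT(1); // LT(1) is the token about to be consumed
                    count++;
                    tokens.Consume();              // advances p and refills via Sync(1)
                }
            }
            finally
            {
                tokens.Release(marker);
            }
            return count;
        }
    }
}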
using System; using System.Collections; using System.Collections.Generic; using System.Threading.Tasks; using Orleans; namespace UnitTests.GrainInterfaces { using Orleans.Concurrency; namespace One { public class Command { } } namespace Two { public class Command { } } /// <summary> /// Repro for https://github.com/dotnet/orleans/issues/3713. /// Having multiple methods with the same name and same parameter type /// name would cause a code generation failure because only one of the /// methods would be implemented in the generated GrainReference. /// </summary> internal interface ISameNameParameterTypeGrain : IGrainWithIntegerKey { Task ExecuteCommand(One.Command command); Task ExecuteCommand(Two.Command command); } internal interface IInternalPingGrain : IGrainWithIntegerKey { Task Ping(); } public interface ISomeGrain : IGrainWithIntegerKey { Task Do(Outsider o); } [Unordered] public interface ISomeGrainWithInvocationOptions : IGrainWithIntegerKey { [AlwaysInterleave] Task AlwaysInterleave(); } public interface ISerializationGenerationGrain : IGrainWithIntegerKey { Task<object> RoundTripObject(object input); Task<SomeStruct> RoundTripStruct(SomeStruct input); Task<SomeAbstractClass> RoundTripClass(SomeAbstractClass input); Task<ISomeInterface> RoundTripInterface(ISomeInterface input); Task<SomeAbstractClass.SomeEnum> RoundTripEnum(SomeAbstractClass.SomeEnum input); Task SetState(SomeAbstractClass input); Task<SomeAbstractClass> GetState(); } } public class Outsider { } namespace UnitTests.GrainInterfaces { [Serializable] public class CaseInsensitiveStringEquality : EqualityComparer<string> { public override bool Equals(string x, string y) { return x.Equals(y, StringComparison.OrdinalIgnoreCase); } public override int GetHashCode(string obj) { return obj.ToLowerInvariant().GetHashCode(); } } [Serializable] public class Mod5IntegerComparer : EqualityComparer<int> { public override bool Equals(int x, int y) { return ((x - y) % 5) == 0; } public override int GetHashCode(int obj) { return obj % 5; } } [Serializable] public class CaseInsensitiveStringComparer : Comparer<string> { public override int Compare(string x, string y) { var x1 = x.ToLowerInvariant(); var y1 = y.ToLowerInvariant(); return Comparer<string>.Default.Compare(x1, y1); } } [Serializable] public class RootType { public RootType() { MyDictionary = new Dictionary<string, object>(); MyDictionary.Add("obj1", new InnerType()); MyDictionary.Add("obj2", new InnerType()); MyDictionary.Add("obj3", new InnerType()); MyDictionary.Add("obj4", new InnerType()); } public Dictionary<string, object> MyDictionary { get; set; } public override bool Equals(object obj) { var actual = obj as RootType; if (actual == null) { return false; } if (MyDictionary == null) return actual.MyDictionary == null; if (actual.MyDictionary == null) return false; var set1 = new HashSet<KeyValuePair<string, object>>(MyDictionary); var set2 = new HashSet<KeyValuePair<string, object>>(actual.MyDictionary); bool ret = set1.SetEquals(set2); return ret; } public override int GetHashCode() { return base.GetHashCode(); } } [Serializable] public struct SomeStruct { public Guid Id { get; set; } public int PublicValue { get; set; } public int ValueWithPrivateSetter { get; private set; } public int ValueWithPrivateGetter { private get; set; } private int PrivateValue { get; set; } public readonly int ReadonlyField; public IEchoGrain SomeGrainReference { get; set; } public SomeStruct(int readonlyField) : this() { this.ReadonlyField = readonlyField; } public int 
GetValueWithPrivateGetter() { return this.ValueWithPrivateGetter; } public int GetPrivateValue() { return this.PrivateValue; } public void SetPrivateValue(int value) { this.PrivateValue = value; } public void SetValueWithPrivateSetter(int value) { this.ValueWithPrivateSetter = value; } } public interface ISomeInterface { int Int { get; set; } } [Serializable] public abstract class SomeAbstractClass : ISomeInterface { [NonSerialized] private int nonSerializedIntField; public abstract int Int { get; set; } public List<ISomeInterface> Interfaces { get; set; } public SomeAbstractClass[] Classes { get; set; } [Obsolete("This field should not be serialized", true)] public int ObsoleteIntWithError { get; set; } [Obsolete("This field should be serialized")] public int ObsoleteInt { get; set; } public IEchoGrain SomeGrainReference { get; set; } #pragma warning disable 618 public int GetObsoleteInt() => this.ObsoleteInt; public void SetObsoleteInt(int value) { this.ObsoleteInt = value; } #pragma warning restore 618 public SomeEnum Enum { get; set; } public int NonSerializedInt { get { return this.nonSerializedIntField; } set { this.nonSerializedIntField = value; } } [Serializable] public enum SomeEnum { None, Something, SomethingElse } } public class OuterClass { public static SomeConcreteClass GetPrivateClassInstance() => new PrivateConcreteClass(Guid.NewGuid()); public static Type GetPrivateClassType() => typeof(PrivateConcreteClass); [Serializable] public class SomeConcreteClass : SomeAbstractClass { public override int Int { get; set; } public string String { get; set; } private PrivateConcreteClass secretPrivateClass; public void ConfigureSecretPrivateClass() { this.secretPrivateClass = new PrivateConcreteClass(Guid.NewGuid()); } public bool AreSecretBitsIdentitcal(SomeConcreteClass other) { return other.secretPrivateClass?.Identity == this.secretPrivateClass?.Identity; } } [Serializable] private class PrivateConcreteClass : SomeConcreteClass { public PrivateConcreteClass(Guid identity) { this.Identity = identity; } public readonly Guid Identity; } } [Serializable] public class AnotherConcreteClass : SomeAbstractClass { public override int Int { get; set; } public string AnotherString { get; set; } } [Serializable] public class InnerType { public InnerType() { Id = Guid.NewGuid(); Something = Id.ToString(); } public Guid Id { get; set; } public string Something { get; set; } public override bool Equals(object obj) { var actual = obj as InnerType; if (actual == null) { return false; } return Id.Equals(actual.Id) && Equals(Something, actual.Something); } public override int GetHashCode() { return base.GetHashCode(); } } [Serializable] public class ClassWithStructConstraint<T> where T : struct { public T Value { get; set; } } // This class should not have a serializer generated for it, since the serializer would not be able to access // the nested private class. [Serializable] public class ClassWithNestedPrivateClassInListField { private readonly List<NestedPrivateClass> coolBeans = new List<NestedPrivateClass> { new NestedPrivateClass() }; public IEnumerable CoolBeans => this.coolBeans; private class NestedPrivateClass { } } }
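// Illustrative sketch (not part of the test interfaces above): a client-side round trip
// through ISerializationGenerationGrain. "client" is assumed to be an already connected
// IClusterClient; the struct constructor and members come from SomeStruct above.
namespace UnitTests.GrainInterfaces.Examples
{
    using System.Threading.Tasks;
    using Orleans;

    public static class SerializationGrainUsage
    {
        public static async Task<bool> RoundTripAsync(IClusterClient client)
        {
            var grain = client.GetGrain<ISerializationGenerationGrain>(0);

            // The readonly field can only be set through the constructor; the grain echoes the struct back.
            var sent = new SomeStruct(readonlyField: 42) { PublicValue = 7 };
            SomeStruct received = await grain.RoundTripStruct(sent);

            return received.ReadonlyField == sent.ReadonlyField
                && received.PublicValue == sent.PublicValue;
        }
    }
}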
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ using System; using System.Collections.Generic; using ParquetSharp.Column; using ParquetSharp.External; using ParquetSharp.Filter; using ParquetSharp.Filter2.Compat; using ParquetSharp.Hadoop.Api; using ParquetSharp.Hadoop.Metadata; using ParquetSharp.Hadoop.Util; using ParquetSharp.Hadoop.Util.Counters; using ParquetSharp.IO; using ParquetSharp.Schema; namespace ParquetSharp.Hadoop { /** * Reads the records from a block of a Parquet file * * @see ParquetInputFormat * * @author Julien Le Dem * * @param <T> type of the materialized records */ public class ParquetRecordReader<T> : RecordReader<object, T> { private static readonly Log LOG = Log.getLog(typeof(ParquetRecordReader<T>)); private readonly InternalParquetRecordReader<T> internalReader; /** * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro. */ public ParquetRecordReader(ReadSupport<T> readSupport) : this(readSupport, FilterCompat.NOOP) { } /** * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro. * @param filter for filtering individual records */ public ParquetRecordReader(ReadSupport<T> readSupport, FilterCompat.Filter filter) { internalReader = new InternalParquetRecordReader<T>(readSupport, filter); } /** * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro. 
* @param filter for filtering individual records * [Obsolete] use {@link #ParquetRecordReader(ReadSupport, Filter)} */ [Obsolete] public ParquetRecordReader(ReadSupport<T> readSupport, UnboundRecordFilter filter) : this(readSupport, FilterCompat.get(filter)) { } /** * {@inheritDoc} */ public void close() { internalReader.close(); } /** * always returns null */ public object getCurrentKey() { return null; } /** * {@inheritDoc} */ public T getCurrentValue() { return internalReader.getCurrentValue(); } /** * {@inheritDoc} */ public float getProgress() { return internalReader.getProgress(); } /** * {@inheritDoc} */ public void initialize(InputSplit inputSplit, TaskAttemptContext context) { if (context is TaskInputOutputContext) { BenchmarkCounter.initCounterFromContext((TaskInputOutputContext)context); } else { LOG.error("Can not initialize counter due to context is not a instance of TaskInputOutputContext, but is " + context.getClass().getCanonicalName()); } initializeInternalReader(toParquetSplit(inputSplit), ContextUtil.getConfiguration(context)); } public void initialize(InputSplit inputSplit, Configuration configuration, Reporter reporter) { BenchmarkCounter.initCounterFromReporter(reporter, configuration); initializeInternalReader(toParquetSplit(inputSplit), configuration); } private void initializeInternalReader(ParquetInputSplit split, Configuration configuration) { Path path = split.getPath(); long[] rowGroupOffsets = split.getRowGroupOffsets(); List<BlockMetaData> filteredBlocks; ParquetMetadata footer; // if task.side.metadata is set, rowGroupOffsets is null if (rowGroupOffsets == null) { // then we need to apply the predicate push down filter footer = readFooter(configuration, path, range(split.getStart(), split.getEnd())); MessageType fileSchema = footer.getFileMetaData().getSchema(); FilterCompat.Filter filter = getFilter(configuration); filteredBlocks = filterRowGroups(filter, footer.getBlocks(), fileSchema); } else { // otherwise we find the row groups that were selected on the client footer = readFooter(configuration, path, NO_FILTER); HashSet<long> offsets = new HashSet<long>(); foreach (long offset in rowGroupOffsets) { offsets.Add(offset); } filteredBlocks = new List<BlockMetaData>(); foreach (BlockMetaData block in footer.getBlocks()) { if (offsets.Contains(block.getStartingPos())) { filteredBlocks.Add(block); } } // verify we found them all if (filteredBlocks.Count != rowGroupOffsets.Length) { long[] foundRowGroupOffsets = new long[footer.getBlocks().Count]; for (int i = 0; i < foundRowGroupOffsets.Length; i++) { foundRowGroupOffsets[i] = footer.getBlocks()[i].getStartingPos(); } // this should never happen. // provide a good error message in case there's a bug throw new IllegalStateException( "All the offsets listed in the split should be found in the file." + " expected: " + Arrays.toString(rowGroupOffsets) + " found: " + filteredBlocks + " out of: " + Arrays.toString(foundRowGroupOffsets) + " in range " + split.getStart() + ", " + split.getEnd()); } } if (!filteredBlocks.isEmpty()) { checkDeltaByteArrayProblem(footer.getFileMetaData(), configuration, filteredBlocks[0]); } MessageType fileSchema = footer.getFileMetaData().getSchema(); internalReader.initialize( fileSchema, footer.getFileMetaData(), path, filteredBlocks, configuration); } private void checkDeltaByteArrayProblem(FileMetaData meta, Configuration conf, BlockMetaData block) { // splitting files? 
if (conf.getBoolean(ParquetInputFormat.SPLIT_FILES, true)) { // this is okay if not using DELTA_BYTE_ARRAY with the bug HashSet<Encoding> encodings = new HashSet<Encoding>(); foreach (ColumnChunkMetaData column in block.getColumns()) { encodings.UnionWith(column.getEncodings()); } foreach (Encoding encoding in encodings) { if (CorruptDeltaByteArrays.requiresSequentialReads(meta.getCreatedBy(), encoding)) { throw new ParquetDecodingException("Cannot read data due to " + "PARQUET-246: to read safely, set " + SPLIT_FILES + " to false"); } } } } /** * {@inheritDoc} */ public bool nextKeyValue() { return internalReader.nextKeyValue(); } private ParquetInputSplit toParquetSplit(InputSplit split) { if (split is ParquetInputSplit) { return (ParquetInputSplit)split; } else if (split is FileSplit) { return ParquetInputSplit.from((FileSplit)split); } else if (split is org.apache.hadoop.mapred.FileSplit) { return ParquetInputSplit.from( (org.apache.hadoop.mapred.FileSplit)split); } else { throw new ArgumentException( "Invalid split (not a FileSplit or ParquetInputSplit): " + split); } } } }
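// Illustrative sketch (not part of the reader source): the mapreduce-style life cycle of
// ParquetRecordReader<T>. The split, task context and read-support instance are assumed to be
// supplied by the surrounding input-format plumbing, and the using directives are assumed to
// match where this port defines ReadSupport, InputSplit and TaskAttemptContext.
namespace ParquetSharp.Hadoop.Examples
{
    using System.Collections.Generic;
    using ParquetSharp.Hadoop.Api;

    public static class ParquetRecordReaderUsage
    {
        public static List<T> ReadAll<T>(ReadSupport<T> readSupport, InputSplit split, TaskAttemptContext context)
        {
            var reader = new ParquetRecordReader<T>(readSupport);
            var records = new List<T>();

            // initialize() reads the footer, applies any pushed-down filter and selects row groups.
            reader.initialize(split, context);
            try
            {
                // nextKeyValue()/getCurrentValue() mirror the Hadoop RecordReader contract.
                while (reader.nextKeyValue())
                {
                    records.Add(reader.getCurrentValue());
                }
            }
            finally
            {
                reader.close();
            }
            return records;
        }
    }
}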
using System; using System.Collections; using System.Data; using MbUnit.Core.Framework; using MbUnit.Framework; using TestFu.Grammars; using TestFu.Data; using TestFu.Data.Collections; namespace TestFu.Tests.Data.Collections { using TestFu.Tests.Data.Generators; [GrammarFixture] [FixtureCategory("Current")] public class DataGeneratorCollectionGrammar : Grammar { private IDataGeneratorCollection col=new DataGeneratorCollection(); private AllDataGeneratorFactory factory = new AllDataGeneratorFactory(); private ArrayList oracle = new ArrayList(); private IRule add; private IRule remove; private IRule removeColumn; private IRule removeColumnName; private IRule clear; private IRule guardRemove; private IRule guardRemoveColumn; private IRule guardRemoveColumnName; private IRule nonEmpty; private IRule empty; private IRule modify; public DataGeneratorCollectionGrammar() { // methods this.add = Rules.Method(new MethodInvoker(this.Add)); this.remove = Rules.Method(new MethodInvoker(this.Remove)); this.removeColumn = Rules.Method(new MethodInvoker(this.RemoveColumn)); this.removeColumnName = Rules.Method(new MethodInvoker(this.RemoveColumnName)); this.clear = Rules.Method(new MethodInvoker(this.Clear)); // guarded methods this.guardRemove = Rules.Guard( this.remove, typeof(InvalidOperationException) ); this.guardRemoveColumn = Rules.Guard( this.removeColumn, typeof(InvalidOperationException) ); this.guardRemoveColumnName = Rules.Guard( this.removeColumnName, typeof(InvalidOperationException) ); // high order rules this.nonEmpty = Rules.Alt( this.add, this.clear, this.remove, this.removeColumn, this.removeColumnName ); this.empty = Rules.Alt( this.add, this.clear, this.guardRemove, this.guardRemoveColumn, this.guardRemoveColumnName ); this.modify = Rules.If(new ConditionDelegate(this.IsEmpty), empty, nonEmpty); this.StartRule = Rules.Kleene(this.modify); } #region Rules public bool IsEmpty(IProductionToken token) { return this.col.Count==0; } public void Add() { IDataGenerator gen = this.factory.GetRandomGenerator(); if (col.Contains(gen)) { try { Console.WriteLine("Add({0}) - duplicate",gen); col.Add(gen); } catch(ArgumentException) {} } else { Console.WriteLine("Add({0})",gen); int count = col.Count; col.Add(gen); oracle.Add(gen); Assert.AreEqual(count+1,col.Count); } this.Check(); } public void Remove() { IDataGenerator gen = this.factory.GetRandomGenerator(); Console.WriteLine("Remove({0})",gen); int count = col.Count; if (col.Contains(gen)) count--; col.Remove(gen); oracle.Remove(gen); Assert.AreEqual(count,col.Count); Assert.IsFalse(col.Contains(gen)); Assert.IsFalse(col.Contains(gen.Column)); Assert.IsFalse(col.Contains(gen.Column.ColumnName)); this.Check(); } public void RemoveColumn() { IDataGenerator gen = this.factory.GetRandomGenerator(); Console.WriteLine("RemoveColumn({0})",gen); int count = col.Count; if (col.Contains(gen)) count--; col.Remove(gen.Column); oracle.Remove(gen); Assert.AreEqual(count,col.Count); Assert.IsFalse(col.Contains(gen)); Assert.IsFalse(col.Contains(gen.Column)); Assert.IsFalse(col.Contains(gen.Column.ColumnName)); this.Check(); } public void RemoveColumnName() { IDataGenerator gen = this.factory.GetRandomGenerator(); Console.WriteLine("RemoveColumnName({0})",gen); int count = col.Count; if (col.Contains(gen)) count--; col.Remove(gen.Column.ColumnName); oracle.Remove(gen); Assert.AreEqual(count,col.Count); Assert.IsFalse(col.Contains(gen)); Assert.IsFalse(col.Contains(gen.Column)); Assert.IsFalse(col.Contains(gen.Column.ColumnName)); this.Check(); } public void 
Clear() { Console.WriteLine("Clear()"); col.Clear(); oracle.Clear(); this.Check(); } public void Check() { // checking count Assert.AreEqual(oracle.Count,col.Count); CollectionAssert.IsCountCorrect(col); // checking elements foreach(IDataGenerator gen in this.factory.Generators) { bool contains = this.oracle.Contains(gen); if (contains) { // check contains Assert.IsTrue(col.Contains(gen)); Assert.IsTrue(col.Contains(gen.Column)); Assert.IsTrue(col.Contains(gen.Column.ColumnName)); // check item Assert.AreEqual(gen,col[gen.Column]); Assert.AreEqual(gen,col[gen.Column.ColumnName]); } else { Assert.IsFalse(col.Contains(gen)); Assert.IsFalse(col.Contains(gen.Column)); Assert.IsFalse(col.Contains(gen.Column.ColumnName)); Assert.AreEqual(null,col[gen.Column]); Assert.AreEqual(null,col[gen.Column.ColumnName]); } } } #endregion #region Production execution [Grammar] public Grammar This() { return this; } [Seed] public int Seed10() { return 10; } [Seed] public int Seed50() { return 50; } [Seed] public int Seed200() { return 200; } #endregion } }
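// The fixture above is a model-based (oracle) test: every grammar production mutates both the
// collection under test and a plain ArrayList acting as a trusted model, then Check() asserts
// that the two agree. A minimal self-contained sketch of that pattern, using List<string> as a
// stand-in for IDataGeneratorCollection; the names below are illustrative and not part of TestFu.
using System;
using System.Collections.Generic;

internal static class OraclePatternSketch
{
    private static readonly List<string> underTest = new List<string>(); // stands in for the real collection
    private static readonly List<string> oracle = new List<string>();    // trusted reference model

    internal static void Run()
    {
        Add("a");
        Add("b");
        Remove("a");
        Console.WriteLine("collection and oracle agree after every step");
    }

    private static void Add(string item)
    {
        underTest.Add(item);
        oracle.Add(item);
        Check();
    }

    private static void Remove(string item)
    {
        underTest.Remove(item);
        oracle.Remove(item);
        Check();
    }

    // Mirrors DataGeneratorCollectionGrammar.Check(): counts and membership must match the oracle.
    private static void Check()
    {
        if (underTest.Count != oracle.Count)
            throw new InvalidOperationException("count differs from oracle");
        foreach (string item in oracle)
            if (!underTest.Contains(item))
                throw new InvalidOperationException("missing item: " + item);
    }
}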
using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Reflection; namespace Fabrikam.Module1.Uc1.Query.Services.WebApi.v1.Areas.HelpPage { /// <summary> /// This class will create an object of a given type and populate it with sample data. /// </summary> public class ObjectGenerator { internal const int DefaultCollectionSize = 2; private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator(); /// <summary> /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types: /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc. /// Complex types: POCO types. /// Nullables: <see cref="Nullable{T}"/>. /// Arrays: arrays of simple types or complex types. /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/> /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>. /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>. /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>An object of the given type.</returns> public object GenerateObject(Type type) { return GenerateObject(type, new Dictionary<Type, object>()); } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")] private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences) { try { if (SimpleTypeObjectGenerator.CanGenerateObject(type)) { return SimpleObjectGenerator.GenerateObject(type); } if (type.IsArray) { return GenerateArray(type, DefaultCollectionSize, createdObjectReferences); } if (type.IsGenericType) { return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IDictionary)) { return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences); } if (typeof(IDictionary).IsAssignableFrom(type)) { return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection)) { return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences); } if (typeof(IList).IsAssignableFrom(type)) { return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IQueryable)) { return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences); } if (type.IsEnum) { return GenerateEnum(type); } if (type.IsPublic || type.IsNestedPublic) { return GenerateComplexObject(type, createdObjectReferences); } } catch { // Returns null if anything fails return null; } return null; } private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences) { Type genericTypeDefinition = type.GetGenericTypeDefinition(); if (genericTypeDefinition == typeof(Nullable<>)) { return 
GenerateNullable(type, createdObjectReferences); } if (genericTypeDefinition == typeof(KeyValuePair<,>)) { return GenerateKeyValuePair(type, createdObjectReferences); } if (IsTuple(genericTypeDefinition)) { return GenerateTuple(type, createdObjectReferences); } Type[] genericArguments = type.GetGenericArguments(); if (genericArguments.Length == 1) { if (genericTypeDefinition == typeof(IList<>) || genericTypeDefinition == typeof(IEnumerable<>) || genericTypeDefinition == typeof(ICollection<>)) { Type collectionType = typeof(List<>).MakeGenericType(genericArguments); return GenerateCollection(collectionType, collectionSize, createdObjectReferences); } if (genericTypeDefinition == typeof(IQueryable<>)) { return GenerateQueryable(type, collectionSize, createdObjectReferences); } Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]); if (closedCollectionType.IsAssignableFrom(type)) { return GenerateCollection(type, collectionSize, createdObjectReferences); } } if (genericArguments.Length == 2) { if (genericTypeDefinition == typeof(IDictionary<,>)) { Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments); return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences); } Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]); if (closedDictionaryType.IsAssignableFrom(type)) { return GenerateDictionary(type, collectionSize, createdObjectReferences); } } if (type.IsPublic || type.IsNestedPublic) { return GenerateComplexObject(type, createdObjectReferences); } return null; } private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences) { Type[] genericArgs = type.GetGenericArguments(); object[] parameterValues = new object[genericArgs.Length]; bool failedToCreateTuple = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < genericArgs.Length; i++) { parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences); failedToCreateTuple &= parameterValues[i] == null; } if (failedToCreateTuple) { return null; } object result = Activator.CreateInstance(type, parameterValues); return result; } private static bool IsTuple(Type genericTypeDefinition) { return genericTypeDefinition == typeof(Tuple<>) || genericTypeDefinition == typeof(Tuple<,>) || genericTypeDefinition == typeof(Tuple<,,>) || genericTypeDefinition == typeof(Tuple<,,,>) || genericTypeDefinition == typeof(Tuple<,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,,,>); } private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences) { Type[] genericArgs = keyValuePairType.GetGenericArguments(); Type typeK = genericArgs[0]; Type typeV = genericArgs[1]; ObjectGenerator objectGenerator = new ObjectGenerator(); object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences); object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences); if (keyObject == null && valueObject == null) { // Failed to create key and values return null; } object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject); return result; } private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences) { Type type = arrayType.GetElementType(); Array result = Array.CreateInstance(type, size); bool 
areAllElementsNull = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object element = objectGenerator.GenerateObject(type, createdObjectReferences); result.SetValue(element, i); areAllElementsNull &= element == null; } if (areAllElementsNull) { return null; } return result; } private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences) { Type typeK = typeof(object); Type typeV = typeof(object); if (dictionaryType.IsGenericType) { Type[] genericArgs = dictionaryType.GetGenericArguments(); typeK = genericArgs[0]; typeV = genericArgs[1]; } object result = Activator.CreateInstance(dictionaryType); MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd"); MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey"); ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences); if (newKey == null) { // Cannot generate a valid key return null; } bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey }); if (!containsKey) { object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences); addMethod.Invoke(result, new object[] { newKey, newValue }); } } return result; } private static object GenerateEnum(Type enumType) { Array possibleValues = Enum.GetValues(enumType); if (possibleValues.Length > 0) { return possibleValues.GetValue(0); } return null; } private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences) { bool isGeneric = queryableType.IsGenericType; object list; if (isGeneric) { Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments()); list = GenerateCollection(listType, size, createdObjectReferences); } else { list = GenerateArray(typeof(object[]), size, createdObjectReferences); } if (list == null) { return null; } if (isGeneric) { Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments()); MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType }); return asQueryableMethod.Invoke(null, new[] { list }); } return Queryable.AsQueryable((IEnumerable)list); } private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences) { Type type = collectionType.IsGenericType ? 
collectionType.GetGenericArguments()[0] : typeof(object); object result = Activator.CreateInstance(collectionType); MethodInfo addMethod = collectionType.GetMethod("Add"); bool areAllElementsNull = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object element = objectGenerator.GenerateObject(type, createdObjectReferences); addMethod.Invoke(result, new object[] { element }); areAllElementsNull &= element == null; } if (areAllElementsNull) { return null; } return result; } private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences) { Type type = nullableType.GetGenericArguments()[0]; ObjectGenerator objectGenerator = new ObjectGenerator(); return objectGenerator.GenerateObject(type, createdObjectReferences); } private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences) { object result = null; if (createdObjectReferences.TryGetValue(type, out result)) { // The object has been created already, just return it. This will handle the circular reference case. return result; } if (type.IsValueType) { result = Activator.CreateInstance(type); } else { ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes); if (defaultCtor == null) { // Cannot instantiate the type because it doesn't have a default constructor return null; } result = defaultCtor.Invoke(new object[0]); } createdObjectReferences.Add(type, result); SetPublicProperties(type, result, createdObjectReferences); SetPublicFields(type, result, createdObjectReferences); return result; } private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences) { PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance); ObjectGenerator objectGenerator = new ObjectGenerator(); foreach (PropertyInfo property in properties) { if (property.CanWrite) { object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences); property.SetValue(obj, propertyValue, null); } } } private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences) { FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance); ObjectGenerator objectGenerator = new ObjectGenerator(); foreach (FieldInfo field in fields) { object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences); field.SetValue(obj, fieldValue); } } private class SimpleTypeObjectGenerator { private long _index = 0; private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators(); [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")] private static Dictionary<Type, Func<long, object>> InitializeGenerators() { return new Dictionary<Type, Func<long, object>> { { typeof(Boolean), index => true }, { typeof(Byte), index => (Byte)64 }, { typeof(Char), index => (Char)65 }, { typeof(DateTime), index => DateTime.Now }, { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) }, { typeof(DBNull), index => DBNull.Value }, { typeof(Decimal), index => (Decimal)index }, { typeof(Double), index => (Double)(index + 0.1) }, { typeof(Guid), index => Guid.NewGuid() }, { typeof(Int16), index => (Int16)(index % Int16.MaxValue) }, { typeof(Int32), index => (Int32)(index % Int32.MaxValue) }, { typeof(Int64), index => (Int64)index }, { 
typeof(Object), index => new object() }, { typeof(SByte), index => (SByte)64 }, { typeof(Single), index => (Single)(index + 0.1) }, { typeof(String), index => { return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index); } }, { typeof(TimeSpan), index => { return TimeSpan.FromTicks(1234567); } }, { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) }, { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) }, { typeof(UInt64), index => (UInt64)index }, { typeof(Uri), index => { return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index)); } }, }; } public static bool CanGenerateObject(Type type) { return DefaultGenerators.ContainsKey(type); } public object GenerateObject(Type type) { return DefaultGenerators[type](++_index); } } } }
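// A short usage sketch for the ObjectGenerator above: GenerateObject walks the type and returns a
// populated sample (simple values, filled collections, recursively built POCOs), or null when the
// type cannot be instantiated. The namespace import matches the file above; the sketch class name
// is illustrative.
using System;
using System.Collections.Generic;
using Fabrikam.Module1.Uc1.Query.Services.WebApi.v1.Areas.HelpPage;

internal static class ObjectGeneratorUsageSketch
{
    internal static void Run()
    {
        var generator = new ObjectGenerator();

        // Simple type: served from SimpleTypeObjectGenerator's table ("sample string 1").
        Console.WriteLine(generator.GenerateObject(typeof(string)));

        // Generic collection: handled via ICollection<T> and filled with
        // DefaultCollectionSize (2) generated elements.
        var list = (List<int>)generator.GenerateObject(typeof(List<int>));
        Console.WriteLine(list.Count); // 2

        // Nullable: unwrapped and generated as the underlying type.
        Console.WriteLine(generator.GenerateObject(typeof(int?))); // e.g. 1
    }
}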
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; using static System.Runtime.Intrinsics.X86.Sse; using static System.Runtime.Intrinsics.X86.Sse2; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void InsertVector128UInt321() { var test = new InsertVector128Test__InsertVector128UInt321(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); } // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); } // Validates passing a static member works test.RunClsVarScenario(); // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); if (Avx.IsSupported) { // Validates passing a local works, using Load test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); } // Validates passing the field of a local class works test.RunClassLclFldScenario(); // Validates passing an instance member of a class works test.RunClassFldScenario(); // Validates passing the field of a local struct works test.RunStructLclFldScenario(); // Validates passing an instance member of a struct works test.RunStructFldScenario(); } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class InsertVector128Test__InsertVector128UInt321 { private struct TestStruct { public Vector256<UInt32> _fld1; public Vector128<UInt32> _fld2; public static TestStruct Create() { var testStruct = new TestStruct(); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>()); return testStruct; } public void RunStructFldScenario(InsertVector128Test__InsertVector128UInt321 testClass) { var result = Avx2.InsertVector128(_fld1, _fld2, 1); 
Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr); } } private static readonly int LargestVectorSize = 32; private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<UInt32>>() / sizeof(UInt32); private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<UInt32>>() / sizeof(UInt32); private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt32>>() / sizeof(UInt32); private static UInt32[] _data1 = new UInt32[Op1ElementCount]; private static UInt32[] _data2 = new UInt32[Op2ElementCount]; private static Vector256<UInt32> _clsVar1; private static Vector128<UInt32> _clsVar2; private Vector256<UInt32> _fld1; private Vector128<UInt32> _fld2; private SimpleBinaryOpTest__DataTable<UInt32, UInt32, UInt32> _dataTable; static InsertVector128Test__InsertVector128UInt321() { for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref _clsVar1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _clsVar2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>()); } public InsertVector128Test__InsertVector128UInt321() { Succeeded = true; for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref _fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt32>, byte>(ref _fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt32>>()); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); } for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); } _dataTable = new SimpleBinaryOpTest__DataTable<UInt32, UInt32, UInt32>(_data1, _data2, new UInt32[RetElementCount], LargestVectorSize); } public bool IsSupported => Avx2.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead)); var result = Avx2.InsertVector128( Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr), 1 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load)); var result = Avx2.InsertVector128( Avx.LoadVector256((UInt32*)(_dataTable.inArray1Ptr)), LoadVector128((UInt32*)(_dataTable.inArray2Ptr)), 1 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned)); var result = Avx2.InsertVector128( Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray1Ptr)), LoadAlignedVector128((UInt32*)(_dataTable.inArray2Ptr)), 1 ); Unsafe.Write(_dataTable.outArrayPtr, result); 
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead)); var result = typeof(Avx2).GetMethod(nameof(Avx2.InsertVector128), new Type[] { typeof(Vector256<UInt32>), typeof(Vector128<UInt32>), typeof(byte) }) .Invoke(null, new object[] { Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr), (byte)1 }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load)); var result = typeof(Avx2).GetMethod(nameof(Avx2.InsertVector128), new Type[] { typeof(Vector256<UInt32>), typeof(Vector128<UInt32>), typeof(byte) }) .Invoke(null, new object[] { Avx.LoadVector256((UInt32*)(_dataTable.inArray1Ptr)), LoadVector128((UInt32*)(_dataTable.inArray2Ptr)), (byte)1 }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned)); var result = typeof(Avx2).GetMethod(nameof(Avx2.InsertVector128), new Type[] { typeof(Vector256<UInt32>), typeof(Vector128<UInt32>), typeof(byte) }) .Invoke(null, new object[] { Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray1Ptr)), LoadAlignedVector128((UInt32*)(_dataTable.inArray2Ptr)), (byte)1 }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario)); var result = Avx2.InsertVector128( _clsVar1, _clsVar2, 1 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } public void RunLclVarScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead)); var left = Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray1Ptr); var right = Unsafe.Read<Vector128<UInt32>>(_dataTable.inArray2Ptr); var result = Avx2.InsertVector128(left, right, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load)); var left = Avx.LoadVector256((UInt32*)(_dataTable.inArray1Ptr)); var right = LoadVector128((UInt32*)(_dataTable.inArray2Ptr)); var result = Avx2.InsertVector128(left, right, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned)); var left = Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray1Ptr)); var right = LoadAlignedVector128((UInt32*)(_dataTable.inArray2Ptr)); var result = Avx2.InsertVector128(left, right, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunClassLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario)); var test = new InsertVector128Test__InsertVector128UInt321(); var result = 
Avx2.InsertVector128(test._fld1, test._fld2, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunClassFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario)); var result = Avx2.InsertVector128(_fld1, _fld2, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr); } public void RunStructLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario)); var test = TestStruct.Create(); var result = Avx2.InsertVector128(test._fld1, test._fld2, 1); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunStructFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario)); var test = TestStruct.Create(); test.RunStructFldScenario(this); } public void RunUnsupportedScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario)); bool succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { succeeded = true; } if (!succeeded) { Succeeded = false; } } private void ValidateResult(Vector256<UInt32> left, Vector128<UInt32> right, void* result, [CallerMemberName] string method = "") { UInt32[] inArray1 = new UInt32[Op1ElementCount]; UInt32[] inArray2 = new UInt32[Op2ElementCount]; UInt32[] outArray = new UInt32[RetElementCount]; Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), left); Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), right); Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt32>>()); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "") { UInt32[] inArray1 = new UInt32[Op1ElementCount]; UInt32[] inArray2 = new UInt32[Op2ElementCount]; UInt32[] outArray = new UInt32[RetElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), (uint)Unsafe.SizeOf<Vector256<UInt32>>()); Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), (uint)Unsafe.SizeOf<Vector128<UInt32>>()); Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt32>>()); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(UInt32[] left, UInt32[] right, UInt32[] result, [CallerMemberName] string method = "") { bool succeeded = true; if (result[0] != left[0]) { succeeded = false; } else { for (var i = 1; i < RetElementCount; i++) { if ((i > 3 ? result[i] != right[i - 4] : result[i] != left[i])) { succeeded = false; break; } } } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"{nameof(Avx2)}.{nameof(Avx2.InsertVector128)}<UInt32>(Vector256<UInt32>, Vector128<UInt32>.1): {method} failed:"); TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})"); TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})"); TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})"); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } } } }
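// ValidateResult above encodes the semantics being tested: with an index of 1, Avx2.InsertVector128
// replaces the upper 128-bit lane of the Vector256 operand with the Vector128 operand and leaves the
// lower lane untouched. A minimal stand-alone sketch of that behaviour (the class name is
// illustrative; the intrinsic and helper APIs are the public System.Runtime.Intrinsics ones used by
// the test):
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

internal static class InsertVector128Sketch
{
    internal static void Run()
    {
        if (!Avx2.IsSupported)
        {
            Console.WriteLine("AVX2 is not supported on this machine.");
            return;
        }

        Vector256<uint> left  = Vector256.Create(0u, 1u, 2u, 3u, 4u, 5u, 6u, 7u);
        Vector128<uint> right = Vector128.Create(100u, 101u, 102u, 103u);

        Vector256<uint> result = Avx2.InsertVector128(left, right, 1);

        // Elements 0..3 come from 'left', elements 4..7 come from 'right',
        // which is exactly what the i > 3 branch in ValidateResult checks.
        for (int i = 0; i < 8; i++)
        {
            Console.Write(result.GetElement(i) + " "); // 0 1 2 3 100 101 102 103
        }
        Console.WriteLine();
    }
}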
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gagvr = Google.Ads.GoogleAds.V8.Resources; using gax = Google.Api.Gax; using sys = System; namespace Google.Ads.GoogleAds.V8.Resources { /// <summary>Resource name for the <c>MobileDeviceConstant</c> resource.</summary> public sealed partial class MobileDeviceConstantName : gax::IResourceName, sys::IEquatable<MobileDeviceConstantName> { /// <summary>The possible contents of <see cref="MobileDeviceConstantName"/>.</summary> public enum ResourceNameType { /// <summary>An unparsed resource name.</summary> Unparsed = 0, /// <summary>A resource name with pattern <c>mobileDeviceConstants/{criterion_id}</c>.</summary> Criterion = 1, } private static gax::PathTemplate s_criterion = new gax::PathTemplate("mobileDeviceConstants/{criterion_id}"); /// <summary>Creates a <see cref="MobileDeviceConstantName"/> containing an unparsed resource name.</summary> /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param> /// <returns> /// A new instance of <see cref="MobileDeviceConstantName"/> containing the provided /// <paramref name="unparsedResourceName"/>. /// </returns> public static MobileDeviceConstantName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) => new MobileDeviceConstantName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName))); /// <summary> /// Creates a <see cref="MobileDeviceConstantName"/> with the pattern <c>mobileDeviceConstants/{criterion_id}</c> /// . /// </summary> /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// A new instance of <see cref="MobileDeviceConstantName"/> constructed from the provided ids. /// </returns> public static MobileDeviceConstantName FromCriterion(string criterionId) => new MobileDeviceConstantName(ResourceNameType.Criterion, criterionId: gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId))); /// <summary> /// Formats the IDs into the string representation of this <see cref="MobileDeviceConstantName"/> with pattern /// <c>mobileDeviceConstants/{criterion_id}</c>. /// </summary> /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="MobileDeviceConstantName"/> with pattern /// <c>mobileDeviceConstants/{criterion_id}</c>. /// </returns> public static string Format(string criterionId) => FormatCriterion(criterionId); /// <summary> /// Formats the IDs into the string representation of this <see cref="MobileDeviceConstantName"/> with pattern /// <c>mobileDeviceConstants/{criterion_id}</c>. /// </summary> /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="MobileDeviceConstantName"/> with pattern /// <c>mobileDeviceConstants/{criterion_id}</c>. 
/// </returns> public static string FormatCriterion(string criterionId) => s_criterion.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId))); /// <summary> /// Parses the given resource name string into a new <see cref="MobileDeviceConstantName"/> instance. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>mobileDeviceConstants/{criterion_id}</c></description></item> /// </list> /// </remarks> /// <param name="mobileDeviceConstantName">The resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="MobileDeviceConstantName"/> if successful.</returns> public static MobileDeviceConstantName Parse(string mobileDeviceConstantName) => Parse(mobileDeviceConstantName, false); /// <summary> /// Parses the given resource name string into a new <see cref="MobileDeviceConstantName"/> instance; optionally /// allowing an unparseable resource name. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>mobileDeviceConstants/{criterion_id}</c></description></item> /// </list> /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>. /// </remarks> /// <param name="mobileDeviceConstantName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="allowUnparsed"> /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/> /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is /// specified. /// </param> /// <returns>The parsed <see cref="MobileDeviceConstantName"/> if successful.</returns> public static MobileDeviceConstantName Parse(string mobileDeviceConstantName, bool allowUnparsed) => TryParse(mobileDeviceConstantName, allowUnparsed, out MobileDeviceConstantName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern."); /// <summary> /// Tries to parse the given resource name string into a new <see cref="MobileDeviceConstantName"/> instance. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>mobileDeviceConstants/{criterion_id}</c></description></item> /// </list> /// </remarks> /// <param name="mobileDeviceConstantName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="result"> /// When this method returns, the parsed <see cref="MobileDeviceConstantName"/>, or <c>null</c> if parsing /// failed. /// </param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string mobileDeviceConstantName, out MobileDeviceConstantName result) => TryParse(mobileDeviceConstantName, false, out result); /// <summary> /// Tries to parse the given resource name string into a new <see cref="MobileDeviceConstantName"/> instance; /// optionally allowing an unparseable resource name. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>mobileDeviceConstants/{criterion_id}</c></description></item> /// </list> /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>. 
/// </remarks> /// <param name="mobileDeviceConstantName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="allowUnparsed"> /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/> /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is /// specified. /// </param> /// <param name="result"> /// When this method returns, the parsed <see cref="MobileDeviceConstantName"/>, or <c>null</c> if parsing /// failed. /// </param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string mobileDeviceConstantName, bool allowUnparsed, out MobileDeviceConstantName result) { gax::GaxPreconditions.CheckNotNull(mobileDeviceConstantName, nameof(mobileDeviceConstantName)); gax::TemplatedResourceName resourceName; if (s_criterion.TryParseName(mobileDeviceConstantName, out resourceName)) { result = FromCriterion(resourceName[0]); return true; } if (allowUnparsed) { if (gax::UnparsedResourceName.TryParse(mobileDeviceConstantName, out gax::UnparsedResourceName unparsedResourceName)) { result = FromUnparsed(unparsedResourceName); return true; } } result = null; return false; } private MobileDeviceConstantName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string criterionId = null) { Type = type; UnparsedResource = unparsedResourceName; CriterionId = criterionId; } /// <summary> /// Constructs a new instance of a <see cref="MobileDeviceConstantName"/> class from the component parts of /// pattern <c>mobileDeviceConstants/{criterion_id}</c> /// </summary> /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param> public MobileDeviceConstantName(string criterionId) : this(ResourceNameType.Criterion, criterionId: gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId))) { } /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary> public ResourceNameType Type { get; } /// <summary> /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an /// unparsed resource name. /// </summary> public gax::UnparsedResourceName UnparsedResource { get; } /// <summary> /// The <c>Criterion</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name. 
/// </summary> public string CriterionId { get; } /// <summary>Whether this instance contains a resource name with a known pattern.</summary> public bool IsKnownPattern => Type != ResourceNameType.Unparsed; /// <summary>The string representation of the resource name.</summary> /// <returns>The string representation of the resource name.</returns> public override string ToString() { switch (Type) { case ResourceNameType.Unparsed: return UnparsedResource.ToString(); case ResourceNameType.Criterion: return s_criterion.Expand(CriterionId); default: throw new sys::InvalidOperationException("Unrecognized resource-type."); } } /// <summary>Returns a hash code for this resource name.</summary> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc/> public override bool Equals(object obj) => Equals(obj as MobileDeviceConstantName); /// <inheritdoc/> public bool Equals(MobileDeviceConstantName other) => ToString() == other?.ToString(); /// <inheritdoc/> public static bool operator ==(MobileDeviceConstantName a, MobileDeviceConstantName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false); /// <inheritdoc/> public static bool operator !=(MobileDeviceConstantName a, MobileDeviceConstantName b) => !(a == b); } public partial class MobileDeviceConstant { /// <summary> /// <see cref="gagvr::MobileDeviceConstantName"/>-typed view over the <see cref="ResourceName"/> resource name /// property. /// </summary> internal MobileDeviceConstantName ResourceNameAsMobileDeviceConstantName { get => string.IsNullOrEmpty(ResourceName) ? null : gagvr::MobileDeviceConstantName.Parse(ResourceName, allowUnparsed: true); set => ResourceName = value?.ToString() ?? ""; } /// <summary> /// <see cref="gagvr::MobileDeviceConstantName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> internal MobileDeviceConstantName MobileDeviceConstantName { get => string.IsNullOrEmpty(Name) ? null : gagvr::MobileDeviceConstantName.Parse(Name, allowUnparsed: true); set => Name = value?.ToString() ?? ""; } } }
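// A short usage sketch for the generated resource-name class above: format, parse and compare names
// that follow the single mobileDeviceConstants/{criterion_id} pattern. The criterion id used here is
// an arbitrary example value.
using System;
using Google.Ads.GoogleAds.V8.Resources;

internal static class MobileDeviceConstantNameSketch
{
    internal static void Run()
    {
        MobileDeviceConstantName name = MobileDeviceConstantName.FromCriterion("12345");
        Console.WriteLine(name);               // mobileDeviceConstants/12345
        Console.WriteLine(name.CriterionId);   // 12345

        // Parse throws on an unrecognised pattern; TryParse returns false instead.
        MobileDeviceConstantName parsed = MobileDeviceConstantName.Parse("mobileDeviceConstants/12345");
        Console.WriteLine(parsed == name);     // True - equality is based on the string form

        Console.WriteLine(MobileDeviceConstantName.TryParse("customers/1/whatever", out _)); // False
    }
}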
namespace Manssiere.Core.DemoFlow { using System; using System.Collections.Generic; using System.Linq; using System.Windows.Controls; using Manssiere.Core.Graphics.Transition; /// <summary> /// The demo flow controlls the order of the effects /// </summary> public abstract class AbstractDemoFlow { #region Delegates public delegate void StateChangedEventHandler(AbstractDemoFlow sender, StateChangeEventArgs args); #endregion private readonly List<ControlDefinition> _controls = new List<ControlDefinition>(); private int _activeControl; /// <summary> /// Gets the state of the previous. /// </summary> /// <value>The state of the previous.</value> public ControlDefinition PreviousState { get { return _activeControl > 0 ? _controls[_activeControl - 1] : null; } } /// <summary> /// Gets the current scene. /// </summary> /// <value>The current scene.</value> public ControlDefinition CurrentScene { get { return _activeControl >= 0 && _activeControl < _controls.Count() ? _controls[_activeControl] : null; } } /// <summary> /// Show this effect without a transition. /// </summary> /// <typeparam name="T"></typeparam> /// <returns></returns> protected ControlDefinition Show<T>() where T : UserControl { var controlDefinition = new ControlDefinition(typeof(T), null, this); _controls.Add(controlDefinition); return new ControlDefinition(this); } /// <summary> /// Show this effect with a fadein. /// </summary> /// <typeparam name="T"></typeparam> /// <returns></returns> protected ControlDefinition FadeIn<T>() where T : UserControl { var controlDefinition = new ControlDefinition(typeof(T), typeof(FadeIn), this); _controls.Add(controlDefinition); return new ControlDefinition(this); } /// <summary> /// Show this effect with a fadein. /// </summary> /// <returns></returns> protected ControlDefinition FadeOut() { var controlDefinition = new ControlDefinition(null, typeof(FadeOut), this); _controls.Add(controlDefinition); return new ControlDefinition(this); } /// <summary> /// Global audio sync event. Register to this point to receive /// messages from the audio queue. /// </summary> public event StateChangedEventHandler StateChanged; /// <summary> /// Invokes the state changed. /// </summary> /// <param name="args">The <see cref="StateChangeEventArgs"/> instance containing the event data.</param> private void InvokeStateChanged(StateChangeEventArgs args) { var handler = StateChanged; if (handler != null) handler(this, args); } /// <summary> /// Moves to the next scene. /// </summary> public void NextScene() { if (_activeControl >= _controls.Count()) return; _activeControl++; if (_activeControl >= _controls.Count()) return; InvokeStateChanged(new StateChangeEventArgs(CurrentScene)); } /// <summary> /// Moves to the previouse. /// </summary> public void PreviousScene() { if (_activeControl <= 0) return; _activeControl--; // on the back state we don't send a transition, this speeds up skipping & eliminates errors. // we could do the same for 'manual' forward moving. Myabe we can use differentt keys for this. if (_activeControl < 0) return; InvokeStateChanged(new StateChangeEventArgs(new ControlDefinition(CurrentScene.ControlType, null, this))); } /// <summary> /// Gets a value indicating whether this instance has scenes. /// </summary> /// <value> /// <c>true</c> if this instance has scenes; otherwise, <c>false</c>. 
/// </value> public bool HasScenes { get { return _controls.Any(); } } #region Nested type: ControlDefinition public class ControlDefinition { /// <summary> /// Initializes a new instance of the <see cref="ControlDefinition"/> class. /// </summary> /// <param name="controlType">Type of the control.</param> /// <param name="transitionType">Type of the transition.</param> /// <param name="demoFlow">The demo flow.</param> public ControlDefinition(Type controlType, Type transitionType, AbstractDemoFlow demoFlow) { if (demoFlow == null) throw new ArgumentNullException("demoFlow"); DemoFlow = demoFlow; ControlType = controlType; TransitionType = transitionType; } /// <summary> /// Initializes a new instance of the <see cref="ControlDefinition"/> class. /// </summary> /// <param name="demoFlow">The demo flow.</param> public ControlDefinition(AbstractDemoFlow demoFlow) { DemoFlow = demoFlow; } /// <summary> /// Gets or sets the demo flow. /// </summary> /// <value>The demo flow.</value> private AbstractDemoFlow DemoFlow { get; set; } /// <summary> /// Gets or sets the type of the control. /// </summary> /// <value>The type of the control.</value> public Type ControlType { get; private set; } /// <summary> /// Gets or sets the type of the transition. /// </summary> /// <value>The type of the transition.</value> public Type TransitionType { get; private set; } /// <summary> /// Define the new effect to show /// </summary> /// <typeparam name="T"></typeparam> /// <returns></returns> public UsingDefinition TransitionTo<T>() { ControlType = typeof(T); DemoFlow._controls.Add(this); return new UsingDefinition(this); } #region Nested type: UsingDefinition public class UsingDefinition { private readonly ControlDefinition _controlDefinition; /// <summary> /// Initializes a new instance of the <see cref="UsingDefinition"/> class. /// </summary> /// <param name="controlDefinition">The control definition.</param> public UsingDefinition(ControlDefinition controlDefinition) { _controlDefinition = controlDefinition; } /// <summary> /// Define a transition between the effects. /// </summary> /// <typeparam name="T"></typeparam> /// <returns></returns> public ControlDefinition Using<T>() where T : ITransition { _controlDefinition.TransitionType = typeof(T); var controlDefinition = new ControlDefinition(_controlDefinition.DemoFlow); return controlDefinition; } } #endregion } #endregion } }
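// A hypothetical concrete flow showing how the builder above is meant to be used:
// Show<T>/FadeIn<T>/FadeOut() enqueue scenes directly, while TransitionTo<T>().Using<T>() enqueues
// the next scene with an explicit transition. IntroScene, MainScene and ExampleDemoFlow are
// illustrative, not part of the original code, and the sketch assumes the FadeIn transition type
// implements ITransition as the built-in transitions do.
using System.Windows.Controls;

namespace Manssiere.Core.DemoFlow
{
    public class IntroScene : UserControl { }
    public class MainScene : UserControl { }

    public class ExampleDemoFlow : AbstractDemoFlow
    {
        public ExampleDemoFlow()
        {
            // Fade the intro in, then move to the main scene with an explicit transition.
            FadeIn<IntroScene>()
                .TransitionTo<MainScene>()
                .Using<Graphics.Transition.FadeIn>();

            // Finally fade to black; NextScene()/PreviousScene() walk this list at runtime.
            FadeOut();
        }
    }
}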
// // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // /** * Namespace: System.Web.UI.WebControls * Class: TableRowCollection * * Author: Gaurav Vaish * Maintainer: [email protected] * Contact: <[email protected]>, <[email protected]> * Implementation: yes * Status: 100% * * (C) Gaurav Vaish (2002) */ using System; using System.ComponentModel; using System.Web; using System.Collections; using System.Web.UI; namespace System.Web.UI.WebControls { [Editor ("System.Web.UI.Design.WebControls.TableRowsCollectionEditor, " + Consts.AssemblySystem_Design, typeof (System.Drawing.Design.UITypeEditor))] public sealed class TableRowCollection: IList, ICollection, IEnumerable { Table owner; internal TableRowCollection(Table owner) { if(owner == null) { throw new ArgumentNullException(); } this.owner = owner; } public int Count { get { return owner.Controls.Count; } } public bool IsReadOnly { get { return false; } } public bool IsSynchronized { get { return false; } } public TableRow this[int index] { get { return (TableRow)owner.Controls[index]; } } public object SyncRoot { get { return this; } } public int Add(TableRow row) { AddAt(-1, row); return owner.Controls.Count - 1; } public void AddAt(int index, TableRow row) { owner.Controls.AddAt(index, row); } public void AddRange(TableRow[] rows) { foreach(TableRow row in rows) { Add(row); } } public void Clear() { if(owner.HasControls()) { owner.Controls.Clear(); } } public void CopyTo(Array array, int index) { foreach(object current in this) { array.SetValue(current, index++); } } public int GetRowIndex(TableRow row) { if(!owner.HasControls()) { return -1; } return owner.Controls.IndexOf(row); } public IEnumerator GetEnumerator() { return owner.Controls.GetEnumerator(); } public void Remove(TableRow row) { owner.Controls.Remove(row); } public void RemoveAt(int index) { owner.Controls.RemoveAt(index); } int IList.Add(object o) { return Add((TableRow)o); } bool IList.Contains(object o) { return owner.Controls.Contains((TableRow)o); } int IList.IndexOf(object o) { return owner.Controls.IndexOf((TableRow)o); } void IList.Insert(int index, object o) { owner.Controls.AddAt(index, (TableRow)o); } void IList.Remove(object o) { owner.Controls.Remove((TableRow)o); } bool IList.IsFixedSize { get { return false; } } object IList.this[int index] { get { return this[index]; } set { RemoveAt(index); AddAt(index, (TableRow)value); } } } }
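// A brief usage sketch for the TableRowCollection above: the collection is a typed facade over the
// owning Table's Controls collection, so rows added through Rows become child controls of the table.
// The sketch class name is illustrative.
using System.Web.UI.WebControls;

internal static class TableRowCollectionSketch
{
    internal static Table BuildTable()
    {
        var table = new Table();

        var row = new TableRow();
        row.Cells.Add(new TableCell { Text = "hello" });

        int index = table.Rows.Add(row);          // returns owner.Controls.Count - 1, i.e. the row's index

        // The indexer and GetRowIndex read straight through to owner.Controls.
        TableRow same = table.Rows[index];        // same reference as 'row'
        int found = table.Rows.GetRowIndex(row);  // == index

        return table;
    }
}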
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Globalization; using System.IO; using System.Linq; using System.Text; using System.Text.Encodings.Web; using System.Text.Unicode; using Xunit; namespace Microsoft.Framework.WebEncoders { public class UrlEncoderTests { private static UTF8Encoding _utf8EncodingThrowOnInvalidBytes = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true); [Fact] public void TestSurrogate() { Assert.Equal("%F0%9F%92%A9", System.Text.Encodings.Web.UrlEncoder.Default.Encode("\U0001f4a9")); using (var writer = new StringWriter()) { System.Text.Encodings.Web.UrlEncoder.Default.Encode(writer, "\U0001f4a9"); Assert.Equal("%F0%9F%92%A9", writer.GetStringBuilder().ToString()); } } [Fact] public void Ctor_WithTextEncoderSettings() { // Arrange var filter = new TextEncoderSettings(); filter.AllowCharacters('a', 'b'); filter.AllowCharacters('\0', '&', '\uFFFF', 'd'); UrlEncoder encoder = new UrlEncoder(filter); // Act & assert Assert.Equal("a", encoder.UrlEncode("a")); Assert.Equal("b", encoder.UrlEncode("b")); Assert.Equal("%63", encoder.UrlEncode("c")); Assert.Equal("d", encoder.UrlEncode("d")); Assert.Equal("%00", encoder.UrlEncode("\0")); // we still always encode control chars Assert.Equal("%26", encoder.UrlEncode("&")); // we still always encode HTML-special chars Assert.Equal("%EF%BF%BF", encoder.UrlEncode("\uFFFF")); // we still always encode non-chars and other forbidden chars } [Fact] public void Ctor_WithUnicodeRanges() { // Arrange UrlEncoder encoder = new UrlEncoder(UnicodeRanges.Latin1Supplement, UnicodeRanges.MiscellaneousSymbols); // Act & assert Assert.Equal("%61", encoder.UrlEncode("a")); Assert.Equal("\u00E9", encoder.UrlEncode("\u00E9" /* LATIN SMALL LETTER E WITH ACUTE */)); Assert.Equal("\u2601", encoder.UrlEncode("\u2601" /* CLOUD */)); } [Fact] public void Ctor_WithNoParameters_DefaultsToBasicLatin() { // Arrange UrlEncoder encoder = new UrlEncoder(); // Act & assert Assert.Equal("a", encoder.UrlEncode("a")); Assert.Equal("%C3%A9", encoder.UrlEncode("\u00E9" /* LATIN SMALL LETTER E WITH ACUTE */)); Assert.Equal("%E2%98%81", encoder.UrlEncode("\u2601" /* CLOUD */)); } [Fact] public void Default_EquivalentToBasicLatin() { // Arrange UrlEncoder controlEncoder = new UrlEncoder(UnicodeRanges.BasicLatin); UrlEncoder testEncoder = UrlEncoder.Default; // Act & assert for (int i = 0; i <= char.MaxValue; i++) { if (!IsSurrogateCodePoint(i)) { string input = new string((char)i, 1); Assert.Equal(controlEncoder.UrlEncode(input), testEncoder.UrlEncode(input)); } } } [Fact] public void UrlEncode_AllRangesAllowed_StillEncodesForbiddenChars() { // Arrange UrlEncoder encoder = new UrlEncoder(UnicodeRanges.All); // Act & assert - BMP chars for (int i = 0; i <= 0xFFFF; i++) { string input = new string((char)i, 1); string expected; if (IsSurrogateCodePoint(i)) { expected = "%EF%BF%BD"; // unpaired surrogate -> Unicode replacement char } else { bool mustEncode = true; // RFC 3987, Sec. 
2.2 gives the list of allowed chars // (We allow 'ipchar' except for "'", "&", "+", "%", and "=" if (('a' <= i && i <= 'z') || ('A' <= i && i <= 'Z') || ('0' <= i && i <= '9')) { mustEncode = false; // ALPHA / DIGIT } else if ((0x00A0 <= i && i <= 0xD7FF) | (0xF900 <= i && i <= 0xFDCF) | (0xFDF0 <= i && i <= 0xFFEF)) { mustEncode = !UnicodeHelpers.IsCharacterDefined((char)i); // 'ucschar' } else { switch (i) { // iunreserved case '-': case '.': case '_': case '~': // isegment-nz-nc case '@': // sub-delims case '!': case '$': case '(': case ')': case '*': case ',': case ';': mustEncode = false; break; } } if (mustEncode) { expected = GetKnownGoodPercentEncodedValue(i); } else { expected = input; // no encoding } } string retVal = encoder.UrlEncode(input); Assert.Equal(expected, retVal); } // Act & assert - astral chars for (int i = 0x10000; i <= 0x10FFFF; i++) { string input = char.ConvertFromUtf32(i); string expected = GetKnownGoodPercentEncodedValue(i); string retVal = encoder.UrlEncode(input); Assert.Equal(expected, retVal); } } [Fact] public void UrlEncode_BadSurrogates_ReturnsUnicodeReplacementChar() { // Arrange UrlEncoder encoder = new UrlEncoder(UnicodeRanges.All); // allow all codepoints // "a<unpaired leading>b<unpaired trailing>c<trailing before leading>d<unpaired trailing><valid>e<high at end of string>" const string input = "a\uD800b\uDFFFc\uDFFF\uD800d\uDFFF\uD800\uDFFFe\uD800"; const string expected = "a%EF%BF%BDb%EF%BF%BDc%EF%BF%BD%EF%BF%BDd%EF%BF%BD%F0%90%8F%BFe%EF%BF%BD"; // 'D800' 'DFFF' was preserved since it's valid // Act string retVal = encoder.UrlEncode(input); // Assert Assert.Equal(expected, retVal); } [Fact] public void UrlEncode_EmptyStringInput_ReturnsEmptyString() { // Arrange UrlEncoder encoder = new UrlEncoder(); // Act & assert Assert.Equal("", encoder.UrlEncode("")); } [Fact] public void UrlEncode_InputDoesNotRequireEncoding_ReturnsOriginalStringInstance() { // Arrange UrlEncoder encoder = new UrlEncoder(); string input = "Hello,there!"; // Act & assert Assert.Same(input, encoder.UrlEncode(input)); } [Fact] public void UrlEncode_NullInput_ReturnsNull() { // Arrange UrlEncoder encoder = new UrlEncoder(); Assert.Throws<ArgumentNullException>(() => { encoder.UrlEncode(null); }); } [Fact] public void UrlEncode_WithCharsRequiringEncodingAtBeginning() { Assert.Equal(@"%26Hello,there!", new UrlEncoder().UrlEncode("&Hello,there!")); } [Fact] public void UrlEncode_WithCharsRequiringEncodingAtEnd() { Assert.Equal(@"Hello,there!%26", new UrlEncoder().UrlEncode("Hello,there!&")); } [Fact] public void UrlEncode_WithCharsRequiringEncodingInMiddle() { Assert.Equal(@"Hello,%20%26there!", new UrlEncoder().UrlEncode("Hello, &there!")); } [Fact] public void UrlEncode_WithCharsRequiringEncodingInterspersed() { Assert.Equal(@"Hello,%20%3Cthere%3E!", new UrlEncoder().UrlEncode("Hello, <there>!")); } [Fact] public void UrlEncode_CharArray() { // Arrange UrlEncoder encoder = new UrlEncoder(); var output = new StringWriter(); // Act encoder.UrlEncode("Hello+world!".ToCharArray(), 3, 5, output); // Assert Assert.Equal("lo%2Bwo", output.ToString()); } [Fact] public void UrlEncode_StringSubstring() { // Arrange UrlEncoder encoder = new UrlEncoder(); var output = new StringWriter(); // Act encoder.UrlEncode("Hello+world!", 3, 5, output); // Assert Assert.Equal("lo%2Bwo", output.ToString()); } [Fact] public void UrlEncode_DoesNotOutputHtmlSensitiveCharacters() { // Per the design document, we provide additional defense-in-depth // by never emitting HTML-sensitive characters unescaped. 
// Arrange UrlEncoder urlEncoder = new UrlEncoder(UnicodeRanges.All); HtmlEncoder htmlEncoder = new HtmlEncoder(UnicodeRanges.All); // Act & assert for (int i = 0; i <= 0x10FFFF; i++) { if (IsSurrogateCodePoint(i)) { continue; // surrogates don't matter here } string urlEncoded = urlEncoder.UrlEncode(char.ConvertFromUtf32(i)); string thenHtmlEncoded = htmlEncoder.HtmlEncode(urlEncoded); Assert.Equal(urlEncoded, thenHtmlEncoded); // should have contained no HTML-sensitive characters } } private static string GetKnownGoodPercentEncodedValue(int codePoint) { // Convert the code point to UTF16, then call Encoding.UTF8.GetBytes, then hex-encode everything return string.Concat(_utf8EncodingThrowOnInvalidBytes.GetBytes(Char.ConvertFromUtf32(codePoint)).Select(b => string.Format(CultureInfo.InvariantCulture, "%{0:X2}", b))); } private static bool IsSurrogateCodePoint(int codePoint) { return (0xD800 <= codePoint && codePoint <= 0xDFFF); } } }
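// The tests above target the older Microsoft.Framework.WebEncoders UrlEncoder; the same
// allowed-range behaviour is exposed today through System.Text.Encodings.Web (already used by
// TestSurrogate above) via UrlEncoder.Default and UrlEncoder.Create. A small sketch, with the
// expected outputs shown as comments:
using System;
using System.Text.Encodings.Web;
using System.Text.Unicode;

internal static class UrlEncoderUsageSketch
{
    internal static void Run()
    {
        // Default encoder: only Basic Latin stays unescaped; space and '&' are always encoded.
        Console.WriteLine(UrlEncoder.Default.Encode("Hello, &there \u00E9"));
        // -> Hello,%20%26there%20%C3%A9

        // Allowing Latin-1 Supplement leaves 'é' literal, but reserved characters such as '&'
        // are still percent-encoded, mirroring the Ctor_WithUnicodeRanges test above.
        UrlEncoder relaxed = UrlEncoder.Create(UnicodeRanges.BasicLatin, UnicodeRanges.Latin1Supplement);
        Console.WriteLine(relaxed.Encode("Hello, &there \u00E9"));
        // -> Hello,%20%26there%20é
    }
}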
// // Copyright (c) 2004-2011 Jaroslaw Kowalski <[email protected]> // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // #if !SILVERLIGHT && !__IOS__ && !__ANDROID__ namespace NLog.Targets { using System; using System.Collections.Generic; using System.ComponentModel; using System.Configuration; using System.Data; using System.Data.Common; using System.Globalization; using System.Reflection; using System.Text; using System.Transactions; using NLog.Common; using NLog.Config; using NLog.Internal; using NLog.Layouts; using ConfigurationManager = System.Configuration.ConfigurationManager; /// <summary> /// Writes log messages to the database using an ADO.NET provider. /// </summary> /// <seealso href="https://github.com/nlog/nlog/wiki/Database-target">Documentation on NLog Wiki</seealso> /// <example> /// <para> /// The configuration is dependent on the database type, because /// there are differnet methods of specifying connection string, SQL /// command and command parameters. 
/// </para> /// <para>MS SQL Server using System.Data.SqlClient:</para> /// <code lang="XML" source="examples/targets/Configuration File/Database/MSSQL/NLog.config" height="450" /> /// <para>Oracle using System.Data.OracleClient:</para> /// <code lang="XML" source="examples/targets/Configuration File/Database/Oracle.Native/NLog.config" height="350" /> /// <para>Oracle using System.Data.OleDBClient:</para> /// <code lang="XML" source="examples/targets/Configuration File/Database/Oracle.OleDB/NLog.config" height="350" /> /// <para>To set up the log target programmatically use code like this (an equivalent of MSSQL configuration):</para> /// <code lang="C#" source="examples/targets/Configuration API/Database/MSSQL/Example.cs" height="630" /> /// </example> [Target("Database")] public sealed class DatabaseTarget : Target, IInstallable { private static Assembly systemDataAssembly = typeof(IDbConnection).Assembly; private IDbConnection activeConnection = null; private string activeConnectionString; /// <summary> /// Initializes a new instance of the <see cref="DatabaseTarget" /> class. /// </summary> public DatabaseTarget() { this.Parameters = new List<DatabaseParameterInfo>(); this.InstallDdlCommands = new List<DatabaseCommandInfo>(); this.UninstallDdlCommands = new List<DatabaseCommandInfo>(); this.DBProvider = "sqlserver"; this.DBHost = "."; this.ConnectionStringsSettings = ConfigurationManager.ConnectionStrings; this.CommandType = CommandType.Text; } /// <summary> /// Gets or sets the name of the database provider. /// </summary> /// <remarks> /// <para> /// The parameter name should be a provider invariant name as registered in machine.config or app.config. Common values are: /// </para> /// <ul> /// <li><c>System.Data.SqlClient</c> - <see href="http://msdn.microsoft.com/en-us/library/system.data.sqlclient.aspx">SQL Server Client</see></li> /// <li><c>System.Data.SqlServerCe.3.5</c> - <see href="http://www.microsoft.com/sqlserver/2005/en/us/compact.aspx">SQL Server Compact 3.5</see></li> /// <li><c>System.Data.OracleClient</c> - <see href="http://msdn.microsoft.com/en-us/library/system.data.oracleclient.aspx">Oracle Client from Microsoft</see> (deprecated in .NET Framework 4)</li> /// <li><c>Oracle.DataAccess.Client</c> - <see href="http://www.oracle.com/technology/tech/windows/odpnet/index.html">ODP.NET provider from Oracle</see></li> /// <li><c>System.Data.SQLite</c> - <see href="http://sqlite.phxsoftware.com/">System.Data.SQLite driver for SQLite</see></li> /// <li><c>Npgsql</c> - <see href="http://npgsql.projects.postgresql.org/">Npgsql driver for PostgreSQL</see></li> /// <li><c>MySql.Data.MySqlClient</c> - <see href="http://www.mysql.com/downloads/connector/net/">MySQL Connector/Net</see></li> /// </ul> /// <para>(Note that provider invariant names are not supported on .NET Compact Framework).</para> /// <para> /// Alternatively the parameter value can be a fully qualified name of the provider /// connection type (class implementing <see cref="IDbConnection" />) or one of the following tokens: /// </para> /// <ul> /// <li><c>sqlserver</c>, <c>mssql</c>, <c>microsoft</c> or <c>msde</c> - SQL Server Data Provider</li> /// <li><c>oledb</c> - OLEDB Data Provider</li> /// <li><c>odbc</c> - ODBC Data Provider</li> /// </ul> /// </remarks> /// <docgen category='Connection Options' order='10' /> [RequiredParameter] [DefaultValue("sqlserver")] public string DBProvider { get; set; } /// <summary> /// Gets or sets the name of the connection string (as specified in <see 
href="http://msdn.microsoft.com/en-us/library/bf7sd233.aspx">&lt;connectionStrings&gt; configuration section</see>). /// </summary> /// <docgen category='Connection Options' order='10' /> public string ConnectionStringName { get; set; } /// <summary> /// Gets or sets the connection string. When provided, it overrides the values /// specified in DBHost, DBUserName, DBPassword, DBDatabase. /// </summary> /// <docgen category='Connection Options' order='10' /> public Layout ConnectionString { get; set; } /// <summary> /// Gets or sets the connection string used for installation and uninstallation. If not provided, the regular ConnectionString is used. /// </summary> /// <docgen category='Installation Options' order='10' /> public Layout InstallConnectionString { get; set; } /// <summary> /// Gets the installation DDL commands. /// </summary> /// <docgen category='Installation Options' order='10' /> [ArrayParameter(typeof(DatabaseCommandInfo), "install-command")] public IList<DatabaseCommandInfo> InstallDdlCommands { get; private set; } /// <summary> /// Gets the uninstallation DDL commands. /// </summary> /// <docgen category='Installation Options' order='10' /> [ArrayParameter(typeof(DatabaseCommandInfo), "uninstall-command")] public IList<DatabaseCommandInfo> UninstallDdlCommands { get; private set; } /// <summary> /// Gets or sets a value indicating whether to keep the /// database connection open between the log events. /// </summary> /// <docgen category='Connection Options' order='10' /> [DefaultValue(false)] public bool KeepConnection { get; set; } /// <summary> /// Obsolete - value will be ignored! The logging code always runs outside of transaction. /// /// Gets or sets a value indicating whether to use database transactions. /// Some data providers require this. /// </summary> /// <docgen category='Connection Options' order='10' /> /// <remarks> /// This option was removed in NLog 4.0 because the logging code always runs outside of transaction. /// This ensures that the log gets written to the database if you roll back the main transaction because of an error and want to log the error. /// </remarks> [Obsolete("Obsolete - value will be ignored - logging code always runs outside of transaction. Will be removed in NLog 6.")] public bool? UseTransactions { get; set; } /// <summary> /// Gets or sets the database host name. If the ConnectionString is not provided /// this value will be used to construct the "Server=" part of the /// connection string. /// </summary> /// <docgen category='Connection Options' order='10' /> public Layout DBHost { get; set; } /// <summary> /// Gets or sets the database user name. If the ConnectionString is not provided /// this value will be used to construct the "User ID=" part of the /// connection string. /// </summary> /// <docgen category='Connection Options' order='10' /> public Layout DBUserName { get; set; } /// <summary> /// Gets or sets the database password. If the ConnectionString is not provided /// this value will be used to construct the "Password=" part of the /// connection string. /// </summary> /// <docgen category='Connection Options' order='10' /> public Layout DBPassword { get; set; } /// <summary> /// Gets or sets the database name. If the ConnectionString is not provided /// this value will be used to construct the "Database=" part of the /// connection string. 
/// </summary> /// <docgen category='Connection Options' order='10' /> public Layout DBDatabase { get; set; } /// <summary> /// Gets or sets the text of the SQL command to be run on each log level. /// </summary> /// <remarks> /// Typically this is a SQL INSERT statement or a stored procedure call. /// It should use the database-specific parameters (marked as <c>@parameter</c> /// for SQL server or <c>:parameter</c> for Oracle, other data providers /// have their own notation) and not the layout renderers, /// because the latter is prone to SQL injection attacks. /// The layout renderers should be specified as &lt;parameter /&gt; elements instead. /// </remarks> /// <docgen category='SQL Statement' order='10' /> [RequiredParameter] public Layout CommandText { get; set; } /// <summary> /// Gets or sets the type of the SQL command to be run on each log level. /// </summary> /// <remarks> /// This specifies how the command text is interpreted, as "Text" (default) or as "StoredProcedure". /// When using the value StoredProcedure, the commandText-property would /// normally be the name of the stored procedure. TableDirect method is not supported in this context. /// </remarks> /// <docgen category='SQL Statement' order='11' /> [DefaultValue(CommandType.Text)] public CommandType CommandType { get; set; } /// <summary> /// Gets the collection of parameters. Each parameter contains a mapping /// between NLog layout and a database named or positional parameter. /// </summary> /// <docgen category='SQL Statement' order='12' /> [ArrayParameter(typeof(DatabaseParameterInfo), "parameter")] public IList<DatabaseParameterInfo> Parameters { get; private set; } internal DbProviderFactory ProviderFactory { get; set; } // this is so we can mock the connection string without creating sub-processes internal ConnectionStringSettingsCollection ConnectionStringsSettings { get; set; } internal Type ConnectionType { get; set; } /// <summary> /// Performs installation which requires administrative permissions. /// </summary> /// <param name="installationContext">The installation context.</param> public void Install(InstallationContext installationContext) { this.RunInstallCommands(installationContext, this.InstallDdlCommands); } /// <summary> /// Performs uninstallation which requires administrative permissions. /// </summary> /// <param name="installationContext">The installation context.</param> public void Uninstall(InstallationContext installationContext) { this.RunInstallCommands(installationContext, this.UninstallDdlCommands); } /// <summary> /// Determines whether the item is installed. /// </summary> /// <param name="installationContext">The installation context.</param> /// <returns> /// Value indicating whether the item is installed or null if it is not possible to determine. /// </returns> public bool? IsInstalled(InstallationContext installationContext) { return null; } internal IDbConnection OpenConnection(string connectionString) { IDbConnection connection; if (this.ProviderFactory != null) { connection = this.ProviderFactory.CreateConnection(); } else { connection = (IDbConnection)Activator.CreateInstance(this.ConnectionType); } connection.ConnectionString = connectionString; connection.Open(); return connection; } /// <summary> /// Initializes the target. Can be used by inheriting classes /// to initialize logging. 
/// </summary> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA2204:Literals should be spelled correctly", MessageId = "connectionStrings", Justification = "Name of the config file section.")] protected override void InitializeTarget() { base.InitializeTarget(); #pragma warning disable 618 if (UseTransactions.HasValue) #pragma warning restore 618 { InternalLogger.Warn("UseTransactions is obsolete and will not be used - will be removed in NLog 6"); } bool foundProvider = false; if (!string.IsNullOrEmpty(this.ConnectionStringName)) { // read connection string and provider factory from the configuration file var cs = this.ConnectionStringsSettings[this.ConnectionStringName]; if (cs == null) { throw new NLogConfigurationException("Connection string '" + this.ConnectionStringName + "' is not declared in <connectionStrings /> section."); } this.ConnectionString = SimpleLayout.Escape(cs.ConnectionString); if (!string.IsNullOrEmpty(cs.ProviderName)) { this.ProviderFactory = DbProviderFactories.GetFactory(cs.ProviderName); foundProvider = true; } } if (!foundProvider) { foreach (DataRow row in DbProviderFactories.GetFactoryClasses().Rows) { var invariantname = (string)row["InvariantName"]; if (invariantname == this.DBProvider) { this.ProviderFactory = DbProviderFactories.GetFactory(this.DBProvider); foundProvider = true; break; } } } if (!foundProvider) { switch (this.DBProvider.ToUpper(CultureInfo.InvariantCulture)) { case "SQLSERVER": case "MSSQL": case "MICROSOFT": case "MSDE": this.ConnectionType = systemDataAssembly.GetType("System.Data.SqlClient.SqlConnection", true); break; case "OLEDB": this.ConnectionType = systemDataAssembly.GetType("System.Data.OleDb.OleDbConnection", true); break; case "ODBC": this.ConnectionType = systemDataAssembly.GetType("System.Data.Odbc.OdbcConnection", true); break; default: this.ConnectionType = Type.GetType(this.DBProvider, true); break; } } } /// <summary> /// Closes the target and releases any unmanaged resources. /// </summary> protected override void CloseTarget() { base.CloseTarget(); InternalLogger.Trace("DatabaseTarget: close connection because of CloseTarget"); this.CloseConnection(); } /// <summary> /// Writes the specified logging event to the database. It creates /// a new database command, prepares parameters for it by calculating /// layouts and executes the command. /// </summary> /// <param name="logEvent">The logging event.</param> protected override void Write(LogEventInfo logEvent) { try { this.WriteEventToDatabase(logEvent); } catch (Exception exception) { InternalLogger.Error(exception, "Error when writing to database."); if (exception.MustBeRethrownImmediately()) { throw; } InternalLogger.Trace("DatabaseTarget: close connection because of error"); this.CloseConnection(); throw; } finally { if (!this.KeepConnection) { InternalLogger.Trace("DatabaseTarget: close connection (KeepConnection = false)."); this.CloseConnection(); } } } /// <summary> /// Writes an array of logging events to the log target. By default it iterates on all /// events and passes them to "Write" method. Inheriting classes can use this method to /// optimize batch writes. 
/// </summary> /// <param name="logEvents">Logging events to be written out.</param> protected override void Write(AsyncLogEventInfo[] logEvents) { var buckets = SortHelpers.BucketSort(logEvents, c => this.BuildConnectionString(c.LogEvent)); try { foreach (var kvp in buckets) { foreach (AsyncLogEventInfo ev in kvp.Value) { try { this.WriteEventToDatabase(ev.LogEvent); ev.Continuation(null); } catch (Exception exception) { // in case of exception, close the connection and report it InternalLogger.Error(exception, "Error when writing to database."); if (exception.MustBeRethrownImmediately()) { throw; } InternalLogger.Trace("DatabaseTarget: close connection because of exception"); this.CloseConnection(); ev.Continuation(exception); if (exception.MustBeRethrown()) { throw; } } } } } finally { if (!this.KeepConnection) { InternalLogger.Trace("DatabaseTarget: close connection because of KeepConnection=false"); this.CloseConnection(); } } } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "It's up to the user to ensure proper quoting.")] private void WriteEventToDatabase(LogEventInfo logEvent) { // Always suppress the ambient transaction so that logging is not rolled back if the caller rolls back their own transaction. using (TransactionScope transactionScope = new TransactionScope(TransactionScopeOption.Suppress)) { this.EnsureConnectionOpen(this.BuildConnectionString(logEvent)); IDbCommand command = this.activeConnection.CreateCommand(); command.CommandText = this.CommandText.Render(logEvent); command.CommandType = this.CommandType; InternalLogger.Trace("Executing {0}: {1}", command.CommandType, command.CommandText); foreach (DatabaseParameterInfo par in this.Parameters) { IDbDataParameter p = command.CreateParameter(); p.Direction = ParameterDirection.Input; if (par.Name != null) { p.ParameterName = par.Name; } if (par.Size != 0) { p.Size = par.Size; } if (par.Precision != 0) { p.Precision = par.Precision; } if (par.Scale != 0) { p.Scale = par.Scale; } string stringValue = par.Layout.Render(logEvent); p.Value = stringValue; command.Parameters.Add(p); InternalLogger.Trace(" Parameter: '{0}' = '{1}' ({2})", p.ParameterName, p.Value, p.DbType); } int result = command.ExecuteNonQuery(); InternalLogger.Trace("Finished execution, result = {0}", result); // not really needed as there is no transaction at all. 
transactionScope.Complete(); } } private string BuildConnectionString(LogEventInfo logEvent) { if (this.ConnectionString != null) { return this.ConnectionString.Render(logEvent); } var sb = new StringBuilder(); sb.Append("Server="); sb.Append(this.DBHost.Render(logEvent)); sb.Append(";"); if (this.DBUserName == null) { sb.Append("Trusted_Connection=SSPI;"); } else { sb.Append("User id="); sb.Append(this.DBUserName.Render(logEvent)); sb.Append(";Password="); sb.Append(this.DBPassword.Render(logEvent)); sb.Append(";"); } if (this.DBDatabase != null) { sb.Append("Database="); sb.Append(this.DBDatabase.Render(logEvent)); } return sb.ToString(); } private void EnsureConnectionOpen(string connectionString) { if (this.activeConnection != null) { if (this.activeConnectionString != connectionString) { InternalLogger.Trace("DatabaseTarget: close connection because of opening new."); this.CloseConnection(); } } if (this.activeConnection != null) { return; } InternalLogger.Trace("DatabaseTarget: open connection."); this.activeConnection = this.OpenConnection(connectionString); this.activeConnectionString = connectionString; } private void CloseConnection() { if (this.activeConnection != null) { this.activeConnection.Close(); this.activeConnection.Dispose(); this.activeConnection = null; this.activeConnectionString = null; } } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "It's up to the user to ensure proper quoting.")] private void RunInstallCommands(InstallationContext installationContext, IEnumerable<DatabaseCommandInfo> commands) { // create log event that will be used to render all layouts LogEventInfo logEvent = installationContext.CreateLogEvent(); try { foreach (var commandInfo in commands) { string cs; if (commandInfo.ConnectionString != null) { // if there is connection string specified on the command info, use it cs = commandInfo.ConnectionString.Render(logEvent); } else if (this.InstallConnectionString != null) { // next, try InstallConnectionString cs = this.InstallConnectionString.Render(logEvent); } else { // if it's not defined, fall back to regular connection string cs = this.BuildConnectionString(logEvent); } this.EnsureConnectionOpen(cs); var command = this.activeConnection.CreateCommand(); command.CommandType = commandInfo.CommandType; command.CommandText = commandInfo.Text.Render(logEvent); try { installationContext.Trace("Executing {0} '{1}'", command.CommandType, command.CommandText); command.ExecuteNonQuery(); } catch (Exception exception) { if (exception.MustBeRethrownImmediately()) { throw; } if (commandInfo.IgnoreFailures || installationContext.IgnoreFailures) { installationContext.Warning(exception.Message); } else { installationContext.Error(exception.Message); throw; } } } } finally { InternalLogger.Trace("DatabaseTarget: close connection after install."); this.CloseConnection(); } } } } #endif
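// Hedged sketch (not taken from the NLog sources above): one common way to wire up the
// DatabaseTarget programmatically, using the standard NLog configuration API. The provider,
// connection string, table and column names are placeholders, and the two-argument
// DatabaseParameterInfo(name, layout) constructor is assumed to be available.
namespace NLog.Samples
{
    using NLog;
    using NLog.Config;
    using NLog.Targets;

    public static class DatabaseTargetSetupSketch
    {
        public static void Configure()
        {
            var dbTarget = new DatabaseTarget
            {
                Name = "database",
                DBProvider = "System.Data.SqlClient", // provider invariant name
                ConnectionString = "Server=.;Database=Logs;Trusted_Connection=SSPI;",
                CommandText = "INSERT INTO LogEntries (Logged, Level, Message) " +
                              "VALUES (@logged, @level, @message)"
            };

            // Layout renderers go into <parameter /> entries, not into CommandText
            // (see the CommandText remarks above about SQL injection).
            dbTarget.Parameters.Add(new DatabaseParameterInfo("@logged", "${date}"));
            dbTarget.Parameters.Add(new DatabaseParameterInfo("@level", "${level}"));
            dbTarget.Parameters.Add(new DatabaseParameterInfo("@message", "${message}"));

            var config = new LoggingConfiguration();
            config.AddTarget("database", dbTarget);
            config.LoggingRules.Add(new LoggingRule("*", LogLevel.Info, dbTarget));
            LogManager.Configuration = config;
        }
    }
}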
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // Copyright (c) Microsoft Corporation. All rights reserved. //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// using System; using System.Net; namespace Microsoft.Zelig.Test { public class WebProxyTests : TestBase, ITestInterface { [SetUp] public InitializeResult Initialize() { Log.Comment("Adding set up for the tests."); // Add your functionality here. return InitializeResult.ReadyToGo; } [TearDown] public void CleanUp() { Log.Comment("Cleaning up after the tests."); // TODO: Add your clean up steps here. } public override TestResult Run( string[] args ) { return TestResult.Pass; } //--// //--// //--// [TestMethod] public TestResult TestDefaultWebProxy() { TestResult result = TestResult.Pass; Log.Comment("Set proxy using WebProxy()"); UriProperties props = new UriProperties("http", "www.microsoft.com"); Uri uri = new Uri(props.OriginalUri); WebProxy proxyObject = new WebProxy(); WebRequest wr = WebRequest.Create(uri); wr.Proxy = proxyObject; Uri wrUri = wr.Proxy.GetProxy(uri); if (wrUri != uri) { result = TestResult.Fail; } return result; } [TestMethod] public TestResult TestWebProxyConstructor1() { TestResult result = TestResult.Pass; Log.Comment("Set proxy using WebProxy(string)"); UriProperties props = new UriProperties("http", "www.microsoft.com"); Uri uri = new Uri(props.OriginalUri); string proxy = "itgproxy.redmond.corp.microsoft.com"; WebProxy proxyObject = new WebProxy(proxy); WebRequest wr = WebRequest.Create(uri); wr.Proxy = proxyObject; Uri wrUri = wr.Proxy.GetProxy(uri); if (wrUri.Host != proxy) { result = TestResult.Fail; } return result; } [TestMethod] public TestResult TestWebProxyConstructor2() { TestResult result = TestResult.Pass; Log.Comment("Set proxy using WebProxy(string, bool)"); UriProperties props = new UriProperties("http", "www.microsoft.com"); Uri uri = new Uri(props.OriginalUri); string proxy = "itgproxy.redmond.corp.microsoft.com"; WebProxy proxyObject = new WebProxy(proxy, true); WebRequest wr = WebRequest.Create(uri); wr.Proxy = proxyObject; Uri wrUri = wr.Proxy.GetProxy(uri); if (wrUri.Host != proxy) { result = TestResult.Fail; } return result; } [TestMethod] public TestResult TestWebProxyConstructor3() { TestResult result = TestResult.Pass; Log.Comment("Set proxy using WebProxy(string, int)"); UriProperties props = new UriProperties("http", "www.microsoft.com"); Uri uri = new Uri(props.OriginalUri); string proxy = "itgproxy.redmond.corp.microsoft.com"; WebProxy proxyObject = new WebProxy(proxy, 80); WebRequest wr = WebRequest.Create(uri); wr.Proxy = proxyObject; Uri wrUri = wr.Proxy.GetProxy(uri); if (wrUri.Host != proxy) { result = TestResult.Fail; } return result; } [TestMethod] public TestResult TestWebProxyConstructor4() { TestResult result = TestResult.Pass; Log.Comment("Set proxy using WebProxy(System.Uri, bool)"); UriProperties props = new UriProperties("http", "www.microsoft.com"); Uri uri = new Uri(props.OriginalUri); WebProxy proxyObject = new WebProxy(uri, true); WebRequest wr = WebRequest.Create(uri); wr.Proxy = proxyObject; Uri wrUri = wr.Proxy.GetProxy(uri); if (wrUri != uri) { result = TestResult.Fail; } return result; } [TestMethod] public TestResult TestWebProxyConstructor5() { TestResult 
result = TestResult.Pass; Log.Comment("Set proxy using WebProxy(System.Uri)"); UriProperties props = new UriProperties("http", "www.microsoft.com"); Uri uri = new Uri(props.OriginalUri); WebProxy proxyObject = new WebProxy(uri); if (proxyObject.BypassProxyOnLocal) { result = TestResult.Fail; } WebRequest wr = WebRequest.Create(uri); wr.Proxy = proxyObject; Uri wrUri = wr.Proxy.GetProxy(uri); if (wrUri != uri) { result = TestResult.Fail; } return result; } [TestMethod] public TestResult TestWebProxyInvalidserverAddress() { TestResult result = TestResult.Pass; Log.Comment("Set proxy using WebProxy(string, bool)"); UriProperties props = new UriProperties("http", "www.microsoft.com"); Uri uri = new Uri(props.OriginalUri); try { WebProxy proxyObject = new WebProxy("ht1p:itgproxy", true); result = TestResult.Fail; } catch (ArgumentException ex) { Log.Exception("Expect ArgumentException: ", ex); } try { WebProxy proxyObject = new WebProxy(string.Empty, true); result = TestResult.Fail; } catch (ArgumentOutOfRangeException ex) { Log.Exception("Expect ArgumentOutOfRangeException: ", ex); } return result; } } }
#region S# License /****************************************************************************************** NOTICE!!! This program and source code is owned and licensed by StockSharp, LLC, www.stocksharp.com Viewing or use of this code requires your acceptance of the license agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE Removal of this comment is a violation of the license agreement. Project: StockSharp.Algo.Algo File: BasketPortfolio.cs Created: 2015, 11, 11, 2:32 PM Copyright 2010 by StockSharp, LLC *******************************************************************************************/ #endregion S# License namespace StockSharp.Algo { using System; using System.Collections.Generic; using System.ComponentModel; using System.Linq; using Ecng.Common; using Ecng.Collections; using StockSharp.BusinessEntities; using StockSharp.Messages; /// <summary> /// Basket portfolio. /// </summary> public abstract class BasketPortfolio : Portfolio { /// <summary> /// Portfolios from which this basket is created. /// </summary> [Browsable(false)] public abstract IEnumerable<Portfolio> InnerPortfolios { get; } /// <summary> /// Positions from which this basket is created. /// </summary> [Browsable(false)] public abstract IEnumerable<BasketPosition> InnerPositions { get; } } /// <summary> /// Portfolios basket based on the weights <see cref="WeightedPortfolio.Weights"/>. /// </summary> public class WeightedPortfolio : BasketPortfolio { private sealed class WeightsDictionary : CachedSynchronizedDictionary<Portfolio, decimal> { private sealed class WeightedPosition : BasketPosition { public WeightedPosition(WeightedPortfolio portfolio, IEnumerable<Position> innerPositions) { if (innerPositions == null) throw new ArgumentNullException(nameof(innerPositions)); _innerPositions = innerPositions; var beginValue = 0m; var currentValue = 0m; var blockedValue = 0m; foreach (var position in _innerPositions) { var mult = portfolio.Weights[position.Portfolio]; beginValue += mult * position.BeginValue; currentValue += mult * position.CurrentValue; blockedValue += mult * position.BlockedValue; } BeginValue = beginValue; BlockedValue = blockedValue; CurrentValue = currentValue; } private readonly IEnumerable<Position> _innerPositions; public override IEnumerable<Position> InnerPositions => _innerPositions; } private readonly WeightedPortfolio _parent; private readonly IConnector _connector; public WeightsDictionary(WeightedPortfolio parent, IConnector connector) { if (parent == null) throw new ArgumentNullException(nameof(parent)); _parent = parent; _connector = connector; } public IEnumerable<BasketPosition> Positions { get { return CachedKeys .SelectMany(pf => _connector.Positions.Where(pos => pos.Portfolio == pf)) .GroupBy(pos => pos.Security) .Select(g => new WeightedPosition(_parent, g)); } } public override void Add(Portfolio key, decimal value) { base.Add(key, value); ((INotifyPropertyChanged)key).PropertyChanged += OnPortfolioChanged; RefreshName(); } public override bool Remove(Portfolio key) { if (base.Remove(key)) { ((INotifyPropertyChanged)key).PropertyChanged -= OnPortfolioChanged; RefreshName(); return true; } return false; } public override void Clear() { foreach (var portfolio in CachedKeys) Remove(portfolio); } private void OnPortfolioChanged(object sender, PropertyChangedEventArgs e) { RefreshParent(); } private void RefreshName() { _parent.Name = CachedPairs.Select(p => "{0}*{1}".Put(p.Value, p.Key)).Join(", "); RefreshParent(); } private void RefreshParent() { 
var currencyType = _parent.Currency; var beginValue = 0m.ToCurrency(currencyType ?? CurrencyTypes.USD); var currentValue = 0m.ToCurrency(currencyType ?? CurrencyTypes.USD); var leverage = 0m.ToCurrency(currencyType ?? CurrencyTypes.USD); var commission = 0m.ToCurrency(currencyType ?? CurrencyTypes.USD); foreach (var pair in CachedPairs) { var portfolio = pair.Key; var weight = (Currency)pair.Value; beginValue += Multiple(beginValue, weight, portfolio.BeginValue); currentValue += Multiple(currentValue, weight, portfolio.CurrentValue); leverage += Multiple(leverage, weight, portfolio.Leverage); commission += Multiple(commission, weight, portfolio.Commission); } _parent.BeginValue = beginValue.Value; _parent.CurrentValue = currentValue.Value; _parent.Leverage = leverage.Value / Count; _parent.Commission = commission.Value; } private static Currency Multiple(Currency currency, Currency weight, Currency part) { if (currency == null) throw new ArgumentNullException(nameof(currency)); if (part == null) throw new ArgumentNullException(nameof(part)); if (currency.Type != part.Type) part = part.Convert(currency.Type); return currency * weight * part; } } /// <summary> /// Initializes a new instance of the <see cref="WeightedPortfolio"/>. /// </summary> /// <param name="connector">The connection of interaction with trade systems.</param> public WeightedPortfolio(IConnector connector) { _weights = new WeightsDictionary(this, connector); } private readonly WeightsDictionary _weights; /// <summary> /// Portfolios and their weighting coefficients in the basket. /// </summary> public SynchronizedDictionary<Portfolio, decimal> Weights => _weights; /// <summary> /// Portfolios from which this basket is created. /// </summary> public override IEnumerable<Portfolio> InnerPortfolios => _weights.CachedKeys; /// <summary> /// Positions from which this basket is created. /// </summary> public override IEnumerable<BasketPosition> InnerPositions => _weights.Positions; } }
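// Hedged usage sketch (not part of the StockSharp sources above): building a basket portfolio
// whose aggregated values are a weighted combination of two existing portfolios. The
// `connector`, `portfolioA` and `portfolioB` instances are assumed to come from the hosting
// application; the weights below are arbitrary.
namespace StockSharp.Algo.Samples
{
	using StockSharp.Algo;
	using StockSharp.BusinessEntities;

	public static class WeightedPortfolioSketch
	{
		public static WeightedPortfolio Build(IConnector connector, Portfolio portfolioA, Portfolio portfolioB)
		{
			var basket = new WeightedPortfolio(connector);

			// Adding entries also refreshes the basket name and the aggregated values
			// (BeginValue, CurrentValue, Leverage, Commission) via WeightsDictionary.
			basket.Weights.Add(portfolioA, 0.7m);
			basket.Weights.Add(portfolioB, 0.3m);

			return basket;
		}
	}
}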
using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.IO; using System.Linq; using System.Runtime.Serialization.Formatters.Binary; using System.Text; using OELib.LibraryBase; using OELib.LibraryBase.Messages; using OELib.PokingConnection.Messages; using ProtoBuf; namespace OELibProtobufFormatter { public static class StreamUtils { public static byte[] ReadExactly(this Stream stream, int count) { var buffer = new byte[count]; var offset = 0; while (offset < count) { var read = stream.Read(buffer, offset, count - offset); if (read == 0) throw new EndOfStreamException(); offset += read; } System.Diagnostics.Debug.Assert(offset == count); return buffer; } public static void CopyBytesTo(this Stream stream, Stream destination, int count) { var b = stream.ReadExactly(count); destination.Write(b, 0, count); } } public class SerializationHelper { public Dictionary<string, Tuple<Action<Stream, object>, Func<Stream, string, object>>> ManualSerilaizationActions { get; } = new Dictionary<string, Tuple<Action<Stream, object>, Func<Stream, string, object>>>(); private readonly HashSet<Guid> _nonProtobufTypes = new HashSet<Guid>(); // known types that are not protobuf enabled private readonly HashSet<Guid> _protobufTypes = new HashSet<Guid>(); // known protobuf types private readonly BinaryFormatter _bf; public SerializationHelper() { SetupManualSerData(); _bf = new BinaryFormatter(); } public static void WriteSerializationType(Stream stream, SerializationType type) { var t = new byte[2]; t[0] = (byte)type; t[1] = (byte)type; stream.Write(t, 0, 2); } public static SerializationType ReadSerializationType(Stream stream) { var t = stream.ReadExactly(2); if (t[0] != t[1]) throw new DataMisalignedException(); return (SerializationType)t[0]; } [SuppressMessage("ReSharper", "AssignNullToNotNullAttribute")] public SerializationType DetermineApproprateSerialization(object obj) { if (obj == null) return SerializationType.Manual; if (ManualSerilaizationActions.ContainsKey(obj.GetType().AssemblyQualifiedName)) return SerializationType.Manual; var objType = obj.GetType(); if (_protobufTypes.Contains(objType.GUID)) return SerializationType.Protobuf; if (_nonProtobufTypes.Contains(objType.GUID)) return SerializationType.Binary; var attrbs = objType.GetCustomAttributes(true).ToList(); if (attrbs.Any(a => a is ProtoContractAttribute)) { _protobufTypes.Add(objType.GUID); return SerializationType.Protobuf; } _nonProtobufTypes.Add(objType.GUID); return SerializationType.Binary; } public static Guid ReadGuid(Stream stream) { var b = stream.ReadExactly(16); return new Guid(b); } public static void WriteGuid(Stream stream, Guid guid) { var guidArr = guid.ToByteArray(); stream.Write(guidArr, 0, 16); } public static short ReadShort(Stream stream) { var buffer = stream.ReadExactly(2); return BitConverter.ToInt16(buffer, 0); } public static void WriteShort(Stream stream, short data) { stream.Write(BitConverter.GetBytes(data), 0, 2); } /// <summary> /// Writes a unicode string to a stream (first two bytes = length) /// </summary> /// <param name="stream">stream to write to</param> /// <param name="str">string to write</param> public static void WriteString(Stream stream, string str) { var bb = Encoding.Unicode.GetBytes(str).ToArray(); WriteShort(stream, (short)bb.Length); stream.Write(bb, 0, bb.Length); } /// <summary> /// Reads an unicode string from the stream (first two bytese = length) /// </summary> /// <param name="stream">stream to read from</param> /// <returns>the 
string</returns> public static string ReadString(Stream stream) { var len = ReadShort(stream); var b = stream.ReadExactly(len); return Encoding.Unicode.GetString(b); } [SuppressMessage("ReSharper", "AssignNullToNotNullAttribute")] public void SetupManualSerData() { void WritePrio(Stream stream, object obj) { WriteShort(stream, (short)((Message)obj).Priority); } //system messages ManualSerilaizationActions.Add(typeof(Bye).AssemblyQualifiedName, new Tuple<Action<Stream, object>, Func<Stream, string, object>>( WritePrio, (str, guid) => new Bye { Priority = (Priority)ReadShort(str) })); ManualSerilaizationActions.Add(typeof(Ping).AssemblyQualifiedName, new Tuple<Action<Stream, object>, Func<Stream, string, object>>( WritePrio, (str, guid) => new Ping { Priority = (Priority)ReadShort(str) })); ManualSerilaizationActions.Add(typeof(Pong).AssemblyQualifiedName, new Tuple<Action<Stream, object>, Func<Stream, string, object>>( WritePrio, (str, guid) => new Pong { Priority = (Priority)ReadShort(str) })); ManualSerilaizationActions.Add("null", new Tuple<Action<Stream, object>, Func<Stream, string, object>>( (s, e) => { }, (str, guid) => null)); //poking connection messages ManualSerilaizationActions.Add(typeof(CallMethod).AssemblyQualifiedName, new Tuple<Action<Stream, object>, Func<Stream, string, object>>( (s, o) => // serialization { var oo = (CallMethod)o; WriteGuid(s, oo.MessageID); WriteGuid(s, oo.CallingMessageID); WriteString(s, oo.MethodName); WriteShort(s, (short)oo.Priority); WriteShort(s, (short)oo.Arguments.Count()); oo.Arguments.ToList().ForEach(arg => { Serialize(s, arg); }); }, (s, g) => //deserialization { var messageID = ReadGuid(s); var callingMessageID = ReadGuid(s); var methodName = ReadString(s); var priority = (Priority)ReadShort(s); var argsCount = ReadShort(s); var args = Enumerable.Range(0, argsCount).Select(i => { var o = Deserialize(s); return o; }).ToArray(); var r = new CallMethod(methodName, args, null) { MessageID = messageID, CallingMessageID = callingMessageID, Priority = priority }; //TODO: Generic types not supported, do it return r; } )); ManualSerilaizationActions.Add(typeof(CallMethodResponse).AssemblyQualifiedName, new Tuple<Action<Stream, object>, Func<Stream, string, object>>( (s, o) => // serialization { var oo = (CallMethodResponse)o; WriteGuid(s, oo.MessageID); WriteGuid(s, oo.CallingMessageID); WriteShort(s, (short)oo.Priority); Serialize(s, oo.Response); Serialize(s, oo.Exception); }, (s, g) => //deserialization { var messageID = ReadGuid(s); var callingMessageID = ReadGuid(s); var priority = (Priority)ReadShort(s); var response = Deserialize(s); var exception = Deserialize(s); var r = new CallMethodResponse(new CallMethod("", new object[] { }, null) { MessageID = callingMessageID }, response, exception as Exception) { MessageID = messageID, CallingMessageID = callingMessageID, Priority = priority }; return r; } )); } [SuppressMessage("ReSharper", "AssignNullToNotNullAttribute")] public void ManuallySerialize(Stream stream, object obj) { var name = obj == null ? 
"null" : obj.GetType().AssemblyQualifiedName; if (!ManualSerilaizationActions.ContainsKey(name)) throw new InvalidOperationException("Cannot manually serialize this type"); WriteString(stream, name); ManualSerilaizationActions[name].Item1(stream, obj); } public object ManuallyDeserialize(Stream stream) { var assemblyQualifiedName = ReadString(stream); if (!ManualSerilaizationActions.ContainsKey(assemblyQualifiedName)) throw new InvalidOperationException("Cannot manually deserialize this type"); return ManualSerilaizationActions[assemblyQualifiedName].Item2(stream, assemblyQualifiedName); } public void BinarySerialize(Stream stream, object obj) { _bf.Serialize(stream, obj); } public object BinaryDeserialize(Stream stream) { return _bf.Deserialize(stream); } public void ProtobufSerialize(Stream stream, object obj) { //TODO: this is very slow. calls should be cashed var objT = obj.GetType(); WriteString(stream, objT.AssemblyQualifiedName); // protobuf deserialize needs to know the type var methodInfo = typeof(Serializer).GetMethods(System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags .Static).Where(mi => mi.Name == "Serialize").FirstOrDefault(mi => mi.GetParameters().Count() == 2 && mi.GetParameters()[0].ParameterType.Name == "Stream" && mi.GetParameters()[1].ParameterType.Name == "T"); // todo: how can this be done better var methGenericInfo = methodInfo?.MakeGenericMethod(objT); using (var ms = new MemoryStream()) { methGenericInfo?.Invoke(null, new[] { ms, obj }); var len = ms.Position; WriteShort(stream, (short)len); ms.Seek(0, SeekOrigin.Begin); if (len > 0) ms.CopyBytesTo(stream, (int)len); } } public object ProtobufDeserialize(Stream stream) { //TODO: this is very slow. calls should be cashed var typeName = ReadString(stream); var type = Type.GetType(typeName); var len = ReadShort(stream); using (var ms = new MemoryStream(len){ Capacity = len }) { if (len > 0) stream.CopyBytesTo(ms, len); ms.Seek(0, SeekOrigin.Begin); var methodInfo = typeof(Serializer).GetMethods(System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags .Static).Where(mi => mi.Name == "Deserialize") .FirstOrDefault(mi => mi.GetParameters().Count() == 2 && mi.GetParameters()[0].ParameterType.Name == "Type" && mi.GetParameters()[1].ParameterType.Name == "Stream"); return methodInfo?.Invoke(null, new object[] { type, ms }); } } public object Deserialize(Stream serializationStream) { var serializationType = ReadSerializationType(serializationStream); switch (serializationType) { case SerializationType.Manual: return ManuallyDeserialize(serializationStream); case SerializationType.Binary: return BinaryDeserialize(serializationStream); case SerializationType.Protobuf: return ProtobufDeserialize(serializationStream); } return null; } public void Serialize(Stream serializationStream, object graph) { var type = DetermineApproprateSerialization(graph); WriteSerializationType(serializationStream, type); switch (type) { case SerializationType.Manual: ManuallySerialize(serializationStream, graph); break; case SerializationType.Binary: BinarySerialize(serializationStream, graph); break; case SerializationType.Protobuf: ProtobufSerialize(serializationStream, graph); break; default: throw new ArgumentOutOfRangeException(); } } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Globalization; using Xunit; public static class SByteTests { [Fact] public static void TestCtor_Empty() { var i = new sbyte(); Assert.Equal(0, i); } [Fact] public static void TestCtor_Value() { sbyte i = 41; Assert.Equal(41, i); } [Fact] public static void TestMaxValue() { Assert.Equal(0x7F, sbyte.MaxValue); } [Fact] public static void TestMinValue() { Assert.Equal(-0x80, sbyte.MinValue); } [Theory] [InlineData((sbyte)114, (sbyte)114, 0)] [InlineData((sbyte)114, sbyte.MinValue, 1)] [InlineData((sbyte)114, (sbyte)-123, 1)] [InlineData((sbyte)114, (sbyte)0, 1)] [InlineData((sbyte)114, (sbyte)123, -1)] [InlineData((sbyte)114, sbyte.MaxValue, -1)] [InlineData((sbyte)114, null, 1)] public static void TestCompareTo(sbyte i, object value, int expected) { if (value is sbyte) { Assert.Equal(expected, Math.Sign(i.CompareTo((sbyte)value))); } IComparable comparable = i; Assert.Equal(expected, Math.Sign(comparable.CompareTo(value))); } [Fact] public static void TestCompareTo_Invalid() { IComparable comparable = (sbyte)114; Assert.Throws<ArgumentException>(null, () => comparable.CompareTo("a")); // Obj is not a sbyte Assert.Throws<ArgumentException>(null, () => comparable.CompareTo(234)); // Obj is not a sbyte } [Theory] [InlineData((sbyte)78, (sbyte)78, true)] [InlineData((sbyte)78, (sbyte)-78, false)] [InlineData((sbyte)78, (sbyte)0, false)] [InlineData((sbyte)0, (sbyte)0, true)] [InlineData((sbyte)-78, (sbyte)-78, true)] [InlineData((sbyte)-78, (sbyte)78, false)] [InlineData((sbyte)78, null, false)] [InlineData((sbyte)78, "78", false)] [InlineData((sbyte)78, 78, false)] public static void TestEquals(sbyte i1, object obj, bool expected) { if (obj is sbyte) { sbyte i2 = (sbyte)obj; Assert.Equal(expected, i1.Equals(i2)); Assert.Equal(expected, i1.GetHashCode().Equals(i2.GetHashCode())); } Assert.Equal(expected, i1.Equals(obj)); } public static IEnumerable<object[]> ToString_TestData() { NumberFormatInfo emptyFormat = NumberFormatInfo.CurrentInfo; yield return new object[] { sbyte.MinValue, "G", emptyFormat, "-128" }; yield return new object[] { (sbyte)-123, "G", emptyFormat, "-123" }; yield return new object[] { (sbyte)0, "G", emptyFormat, "0" }; yield return new object[] { (sbyte)123, "G", emptyFormat, "123" }; yield return new object[] { sbyte.MaxValue, "G", emptyFormat, "127" }; yield return new object[] { (sbyte)0x24, "x", emptyFormat, "24" }; yield return new object[] { (sbyte)24, "N", emptyFormat, string.Format("{0:N}", 24.00) }; NumberFormatInfo customFormat = new NumberFormatInfo(); customFormat.NegativeSign = "#"; customFormat.NumberDecimalSeparator = "~"; customFormat.NumberGroupSeparator = "*"; yield return new object[] { (sbyte)-24, "N", customFormat, "#24~00" }; yield return new object[] { (sbyte)24, "N", customFormat, "24~00" }; } [Theory] [MemberData(nameof(ToString_TestData))] public static void TestToString(sbyte i, string format, IFormatProvider provider, string expected) { // Format is case insensitive string upperFormat = format.ToUpperInvariant(); string lowerFormat = format.ToLowerInvariant(); string upperExpected = expected.ToUpperInvariant(); string lowerExpected = expected.ToLowerInvariant(); bool isDefaultProvider = (provider == null || provider == NumberFormatInfo.CurrentInfo); if (string.IsNullOrEmpty(format) || 
format.ToUpperInvariant() == "G") { if (isDefaultProvider) { Assert.Equal(upperExpected, i.ToString()); Assert.Equal(upperExpected, i.ToString((IFormatProvider)null)); } Assert.Equal(upperExpected, i.ToString(provider)); } if (isDefaultProvider) { Assert.Equal(upperExpected, i.ToString(upperFormat)); Assert.Equal(lowerExpected, i.ToString(lowerFormat)); Assert.Equal(upperExpected, i.ToString(upperFormat, null)); Assert.Equal(lowerExpected, i.ToString(lowerFormat, null)); } Assert.Equal(upperExpected, i.ToString(upperFormat, provider)); Assert.Equal(lowerExpected, i.ToString(lowerFormat, provider)); } [Fact] public static void TestToString_Invalid() { var numberFormat = new NumberFormatInfo(); sbyte i1 = 63; Assert.Equal("63", i1.ToString("G", numberFormat)); sbyte i2 = 82; Assert.Equal("82", i2.ToString("g", numberFormat)); numberFormat.NegativeSign = "xx"; // setting it to trash to make sure it doesn't show up numberFormat.NumberGroupSeparator = "*"; numberFormat.NumberNegativePattern = 0; numberFormat.NumberDecimalSeparator = "."; sbyte i3 = 24; Assert.Equal("24.00", i3.ToString("N", numberFormat)); sbyte i4 = -10; Assert.Equal("F6", i4.ToString("X", numberFormat)); } public static IEnumerable<object[]> ParseValidData() { NumberFormatInfo defaultFormat = null; NumberStyles defaultStyle = NumberStyles.Integer; var emptyNfi = new NumberFormatInfo(); var testNfi = new NumberFormatInfo(); testNfi.CurrencySymbol = "$"; yield return new object[] { "-123", defaultStyle, defaultFormat, (sbyte)-123 }; yield return new object[] { "0", defaultStyle, defaultFormat, (sbyte)0 }; yield return new object[] { "123", defaultStyle, defaultFormat, (sbyte)123 }; yield return new object[] { " 123 ", defaultStyle, defaultFormat, (sbyte)123 }; yield return new object[] { "127", defaultStyle, defaultFormat, (sbyte)127 }; yield return new object[] { "12", NumberStyles.HexNumber, defaultFormat, (sbyte)0x12 }; yield return new object[] { "10", NumberStyles.AllowThousands, defaultFormat, (sbyte)10 }; yield return new object[] { "(123)", NumberStyles.AllowParentheses, defaultFormat, (sbyte)-123 }; // Parentheses = negative yield return new object[] { "123", defaultStyle, emptyNfi, (sbyte)123 }; yield return new object[] { "123", NumberStyles.Any, emptyNfi, (sbyte)123 }; yield return new object[] { "12", NumberStyles.HexNumber, emptyNfi, (sbyte)0x12 }; yield return new object[] { "$100", NumberStyles.Currency, testNfi, (sbyte)100 }; } public static IEnumerable<object[]> Parse_Valid_TestData() { NumberStyles defaultStyle = NumberStyles.Integer; NumberFormatInfo emptyFormat = new NumberFormatInfo(); NumberFormatInfo customFormat = new NumberFormatInfo(); customFormat.CurrencySymbol = "$"; yield return new object[] { "-123", defaultStyle, null, (sbyte)-123 }; yield return new object[] { "0", defaultStyle, null, (sbyte)0 }; yield return new object[] { "123", defaultStyle, null, (sbyte)123 }; yield return new object[] { "+123", defaultStyle, null, (sbyte)123 }; yield return new object[] { " 123 ", defaultStyle, null, (sbyte)123 }; yield return new object[] { "127", defaultStyle, null, (sbyte)127 }; yield return new object[] { "12", NumberStyles.HexNumber, null, (sbyte)0x12 }; yield return new object[] { "10", NumberStyles.AllowThousands, null, (sbyte)10 }; yield return new object[] { "(123)", NumberStyles.AllowParentheses, null, (sbyte)-123 }; // Parentheses = negative yield return new object[] { "123", defaultStyle, emptyFormat, (sbyte)123 }; yield return new object[] { "123", NumberStyles.Any, emptyFormat, (sbyte)123 }; 
yield return new object[] { "12", NumberStyles.HexNumber, emptyFormat, (sbyte)0x12 }; yield return new object[] { "a", NumberStyles.HexNumber, null, (sbyte)0xa }; yield return new object[] { "A", NumberStyles.HexNumber, null, (sbyte)0xa }; yield return new object[] { "$100", NumberStyles.Currency, customFormat, (sbyte)100 }; } [Theory] [MemberData(nameof(Parse_Valid_TestData))] public static void TestParse(string value, NumberStyles style, IFormatProvider provider, sbyte expected) { sbyte result; // If no style is specified, use the (String) or (String, IFormatProvider) overload if (style == NumberStyles.Integer) { Assert.True(sbyte.TryParse(value, out result)); Assert.Equal(expected, result); Assert.Equal(expected, sbyte.Parse(value)); // If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload if (provider != null) { Assert.Equal(expected, sbyte.Parse(value, provider)); } } // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo Assert.True(sbyte.TryParse(value, style, provider ?? new NumberFormatInfo(), out result)); Assert.Equal(expected, result); // If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload if (provider == null) { Assert.Equal(expected, sbyte.Parse(value, style)); } Assert.Equal(expected, sbyte.Parse(value, style, provider ?? new NumberFormatInfo())); } public static IEnumerable<object[]> Parse_Invalid_TestData() { NumberStyles defaultStyle = NumberStyles.Integer; NumberFormatInfo customFormat = new NumberFormatInfo(); customFormat.CurrencySymbol = "$"; customFormat.NumberDecimalSeparator = "."; yield return new object[] { null, defaultStyle, null, typeof(ArgumentNullException) }; yield return new object[] { "", defaultStyle, null, typeof(FormatException) }; yield return new object[] { " \t \n \r ", defaultStyle, null, typeof(FormatException) }; yield return new object[] { "Garbage", defaultStyle, null, typeof(FormatException) }; yield return new object[] { "ab", defaultStyle, null, typeof(FormatException) }; // Hex value yield return new object[] { "1E23", defaultStyle, null, typeof(FormatException) }; // Exponent yield return new object[] { "(123)", defaultStyle, null, typeof(FormatException) }; // Parentheses yield return new object[] { 100.ToString("C0"), defaultStyle, null, typeof(FormatException) }; // Currency yield return new object[] { 1000.ToString("N0"), defaultStyle, null, typeof(FormatException) }; // Thousands yield return new object[] { 67.90.ToString("F2"), defaultStyle, null, typeof(FormatException) }; // Decimal yield return new object[] { "+-123", defaultStyle, null, typeof(FormatException) }; yield return new object[] { "-+123", defaultStyle, null, typeof(FormatException) }; yield return new object[] { "+abc", NumberStyles.HexNumber, null, typeof(FormatException) }; yield return new object[] { "-abc", NumberStyles.HexNumber, null, typeof(FormatException) }; yield return new object[] { "- 123", defaultStyle, null, typeof(FormatException) }; yield return new object[] { "+ 123", defaultStyle, null, typeof(FormatException) }; yield return new object[] { "ab", NumberStyles.None, null, typeof(FormatException) }; // Hex value yield return new object[] { " 123 ", NumberStyles.None, null, typeof(FormatException) }; // Trailing and leading whitespace yield return new object[] { "67.90", defaultStyle, customFormat, typeof(FormatException) }; // Decimal yield return new object[] { "-129", defaultStyle, null, 
typeof(OverflowException) }; // < min value yield return new object[] { "128", defaultStyle, null, typeof(OverflowException) }; // > max value } [Theory] [MemberData(nameof(Parse_Invalid_TestData))] public static void TestParse_Invalid(string value, NumberStyles style, IFormatProvider provider, Type exceptionType) { sbyte result; // If no style is specified, use the (String) or (String, IFormatProvider) overload if (style == NumberStyles.Integer) { Assert.False(sbyte.TryParse(value, out result)); Assert.Equal(default(sbyte), result); Assert.Throws(exceptionType, () => sbyte.Parse(value)); // If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload if (provider != null) { Assert.Throws(exceptionType, () => sbyte.Parse(value, provider)); } } // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo Assert.False(sbyte.TryParse(value, style, provider ?? new NumberFormatInfo(), out result)); Assert.Equal(default(sbyte), result); // If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload if (provider == null) { Assert.Throws(exceptionType, () => sbyte.Parse(value, style)); } Assert.Throws(exceptionType, () => sbyte.Parse(value, style, provider ?? new NumberFormatInfo())); } [Theory] [InlineData(NumberStyles.HexNumber | NumberStyles.AllowParentheses)] [InlineData(unchecked((NumberStyles)0xFFFFFC00))] public static void TestTryParse_InvalidNumberStyle_ThrowsArgumentException(NumberStyles style) { sbyte result = 0; Assert.Throws<ArgumentException>(() => sbyte.TryParse("1", style, null, out result)); Assert.Equal(default(sbyte), result); Assert.Throws<ArgumentException>(() => sbyte.Parse("1", style)); Assert.Throws<ArgumentException>(() => sbyte.Parse("1", style, null)); } }
using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Web; using System.Xml.Linq; using AutoMapper; using Umbraco.Core; using Umbraco.Core.Cache; using Umbraco.Core.IO; using Umbraco.Core.Events; using Umbraco.Core.IO; using Umbraco.Core.Services; using umbraco.DataLayer; namespace umbraco.BusinessLogic { /// <summary> /// umbraco.BusinessLogic.ApplicationTree provides access to the application tree structure in umbraco. /// An application tree is a collection of nodes belonging to one or more application(s). /// Through this class new application trees can be created, modified and deleted. /// </summary> [Obsolete("This has been superceded by ApplicationContext.Current.ApplicationTreeService")] public class ApplicationTree { /// <summary> /// Gets or sets a value indicating whether this <see cref="ApplicationTree"/> is silent. /// </summary> /// <value><c>true</c> if silent; otherwise, <c>false</c>.</value> public bool Silent { get; set; } /// <summary> /// Gets or sets a value indicating whether this <see cref="ApplicationTree"/> should initialize. /// </summary> /// <value><c>true</c> if initialize; otherwise, <c>false</c>.</value> public bool Initialize { get; set; } /// <summary> /// Gets or sets the sort order. /// </summary> /// <value>The sort order.</value> public byte SortOrder { get; set; } /// <summary> /// Gets the application alias. /// </summary> /// <value>The application alias.</value> public string ApplicationAlias { get; private set; } /// <summary> /// Gets the tree alias. /// </summary> /// <value>The alias.</value> public string Alias { get; private set; } /// <summary> /// Gets or sets the tree title. /// </summary> /// <value>The title.</value> public string Title { get; set; } /// <summary> /// Gets or sets the icon closed. /// </summary> /// <value>The icon closed.</value> public string IconClosed { get; set; } /// <summary> /// Gets or sets the icon opened. /// </summary> /// <value>The icon opened.</value> public string IconOpened { get; set; } /// <summary> /// Gets or sets the name of the assembly. /// </summary> /// <value>The name of the assembly.</value> public string AssemblyName { get; set; } /// <summary> /// Gets or sets the tree type. /// </summary> /// <value>The type.</value> public string Type { get; set; } private Type _runtimeType; /// <summary> /// Returns the CLR type based on it's assembly name stored in the config /// </summary> /// <returns></returns> internal Type GetRuntimeType() { return _runtimeType ?? (_runtimeType = System.Type.GetType(Type)); } /// <summary> /// Gets or sets the default tree action. /// </summary> /// <value>The action.</value> public string Action { get; set; } /// <summary> /// Initializes a new instance of the <see cref="ApplicationTree"/> class. /// </summary> public ApplicationTree() { } /// <summary> /// Initializes a new instance of the <see cref="ApplicationTree"/> class. 
/// </summary> /// <param name="silent">if set to <c>true</c> [silent].</param> /// <param name="initialize">if set to <c>true</c> [initialize].</param> /// <param name="sortOrder">The sort order.</param> /// <param name="applicationAlias">The application alias.</param> /// <param name="alias">The tree alias.</param> /// <param name="title">The tree title.</param> /// <param name="iconClosed">The icon closed.</param> /// <param name="iconOpened">The icon opened.</param> /// <param name="assemblyName">Name of the assembly.</param> /// <param name="type">The tree type.</param> /// <param name="action">The default tree action.</param> public ApplicationTree(bool silent, bool initialize, byte sortOrder, string applicationAlias, string alias, string title, string iconClosed, string iconOpened, string assemblyName, string type, string action) { this.Silent = silent; this.Initialize = initialize; this.SortOrder = sortOrder; this.ApplicationAlias = applicationAlias; this.Alias = alias; this.Title = title; this.IconClosed = iconClosed; this.IconOpened = iconOpened; this.AssemblyName = assemblyName; this.Type = type; this.Action = action; } /// <summary> /// Creates a new application tree. /// </summary> /// <param name="silent">if set to <c>true</c> [silent].</param> /// <param name="initialize">if set to <c>true</c> [initialize].</param> /// <param name="sortOrder">The sort order.</param> /// <param name="applicationAlias">The application alias.</param> /// <param name="alias">The alias.</param> /// <param name="title">The title.</param> /// <param name="iconClosed">The icon closed.</param> /// <param name="iconOpened">The icon opened.</param> /// <param name="assemblyName">Name of the assembly.</param> /// <param name="type">The type.</param> /// <param name="action">The action.</param> public static void MakeNew(bool silent, bool initialize, byte sortOrder, string applicationAlias, string alias, string title, string iconClosed, string iconOpened, string assemblyName, string type, string action) { ApplicationContext.Current.Services.ApplicationTreeService.MakeNew(initialize, sortOrder, applicationAlias, alias, title, iconClosed, iconOpened, assemblyName.IsNullOrWhiteSpace() ? type : string.Format("{0}.{1},{0}", assemblyName, type)); } /// <summary> /// Saves this instance. /// </summary> public void Save() { ApplicationContext.Current.Services.ApplicationTreeService.SaveTree( Mapper.Map<ApplicationTree, Umbraco.Core.Models.ApplicationTree>(this)); } /// <summary> /// Deletes this instance. /// </summary> public void Delete() { ApplicationContext.Current.Services.ApplicationTreeService.DeleteTree( Mapper.Map<ApplicationTree, Umbraco.Core.Models.ApplicationTree>(this)); } /// <summary> /// Gets an ApplicationTree by it's tree alias. /// </summary> /// <param name="treeAlias">The tree alias.</param> /// <returns>An ApplicationTree instance</returns> public static ApplicationTree getByAlias(string treeAlias) { return Mapper.Map<Umbraco.Core.Models.ApplicationTree, ApplicationTree>( ApplicationContext.Current.Services.ApplicationTreeService.GetByAlias(treeAlias)); } /// <summary> /// Gets all applicationTrees registered in umbraco from the umbracoAppTree table.. 
/// </summary> /// <returns>Returns an ApplicationTree array</returns> public static ApplicationTree[] getAll() { return ApplicationContext.Current.Services.ApplicationTreeService.GetAll() .Select(Mapper.Map<Umbraco.Core.Models.ApplicationTree, ApplicationTree>) .ToArray(); } /// <summary> /// Gets the application tree for the application with the specified alias /// </summary> /// <param name="applicationAlias">The application alias.</param> /// <returns>Returns an ApplicationTree array</returns> public static ApplicationTree[] getApplicationTree(string applicationAlias) { return ApplicationContext.Current.Services.ApplicationTreeService.GetApplicationTrees(applicationAlias) .Select(Mapper.Map<Umbraco.Core.Models.ApplicationTree, ApplicationTree>) .ToArray(); } /// <summary> /// Gets the application tree for the application with the specified alias /// </summary> /// <param name="applicationAlias">The application alias.</param> /// <param name="onlyInitializedApplications"></param> /// <returns>Returns an ApplicationTree array</returns> public static ApplicationTree[] getApplicationTree(string applicationAlias, bool onlyInitializedApplications) { return ApplicationContext.Current.Services.ApplicationTreeService.GetApplicationTrees(applicationAlias, onlyInitializedApplications) .Select(Mapper.Map<Umbraco.Core.Models.ApplicationTree, ApplicationTree>) .ToArray(); } } }
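// Hedged usage sketch (not part of the file above): shows how the obsolete
// umbraco.BusinessLogic.ApplicationTree wrapper is typically consumed. It assumes
// the code runs inside a booted Umbraco site where ApplicationContext.Current and
// the AutoMapper mappings are initialized; the "content" alias is only an example.
using System;
using umbraco.BusinessLogic;

internal static class ApplicationTreeUsageExample
{
    public static void RenameContentTree()
    {
        // Look up a single tree by alias; returns null when the alias is unknown.
        ApplicationTree tree = ApplicationTree.getByAlias("content");
        if (tree == null) return;

        // Mutate and persist; Save() maps back to ApplicationTreeService.SaveTree.
        tree.Title = "Content (renamed)";
        tree.Save();

        // Enumerate all trees registered for an application section.
        foreach (ApplicationTree t in ApplicationTree.getApplicationTree("content", onlyInitializedApplications: true))
        {
            Console.WriteLine("{0} -> {1}", t.Alias, t.Title);
        }
    }
}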
using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Reflection; namespace SwaggerDemoApi.Areas.HelpPage { /// <summary> /// This class will create an object of a given type and populate it with sample data. /// </summary> public class ObjectGenerator { internal const int DefaultCollectionSize = 2; private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator(); /// <summary> /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types: /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc. /// Complex types: POCO types. /// Nullables: <see cref="Nullable{T}"/>. /// Arrays: arrays of simple types or complex types. /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/> /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>. /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>. /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>An object of the given type.</returns> public object GenerateObject(Type type) { return GenerateObject(type, new Dictionary<Type, object>()); } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")] private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences) { try { if (SimpleTypeObjectGenerator.CanGenerateObject(type)) { return SimpleObjectGenerator.GenerateObject(type); } if (type.IsArray) { return GenerateArray(type, DefaultCollectionSize, createdObjectReferences); } if (type.IsGenericType) { return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IDictionary)) { return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences); } if (typeof(IDictionary).IsAssignableFrom(type)) { return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection)) { return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences); } if (typeof(IList).IsAssignableFrom(type)) { return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IQueryable)) { return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences); } if (type.IsEnum) { return GenerateEnum(type); } if (type.IsPublic || type.IsNestedPublic) { return GenerateComplexObject(type, createdObjectReferences); } } catch { // Returns null if anything fails return null; } return null; } private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences) { Type genericTypeDefinition = type.GetGenericTypeDefinition(); if (genericTypeDefinition == typeof(Nullable<>)) { return GenerateNullable(type, createdObjectReferences); 
} if (genericTypeDefinition == typeof(KeyValuePair<,>)) { return GenerateKeyValuePair(type, createdObjectReferences); } if (IsTuple(genericTypeDefinition)) { return GenerateTuple(type, createdObjectReferences); } Type[] genericArguments = type.GetGenericArguments(); if (genericArguments.Length == 1) { if (genericTypeDefinition == typeof(IList<>) || genericTypeDefinition == typeof(IEnumerable<>) || genericTypeDefinition == typeof(ICollection<>)) { Type collectionType = typeof(List<>).MakeGenericType(genericArguments); return GenerateCollection(collectionType, collectionSize, createdObjectReferences); } if (genericTypeDefinition == typeof(IQueryable<>)) { return GenerateQueryable(type, collectionSize, createdObjectReferences); } Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]); if (closedCollectionType.IsAssignableFrom(type)) { return GenerateCollection(type, collectionSize, createdObjectReferences); } } if (genericArguments.Length == 2) { if (genericTypeDefinition == typeof(IDictionary<,>)) { Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments); return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences); } Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]); if (closedDictionaryType.IsAssignableFrom(type)) { return GenerateDictionary(type, collectionSize, createdObjectReferences); } } if (type.IsPublic || type.IsNestedPublic) { return GenerateComplexObject(type, createdObjectReferences); } return null; } private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences) { Type[] genericArgs = type.GetGenericArguments(); object[] parameterValues = new object[genericArgs.Length]; bool failedToCreateTuple = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < genericArgs.Length; i++) { parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences); failedToCreateTuple &= parameterValues[i] == null; } if (failedToCreateTuple) { return null; } object result = Activator.CreateInstance(type, parameterValues); return result; } private static bool IsTuple(Type genericTypeDefinition) { return genericTypeDefinition == typeof(Tuple<>) || genericTypeDefinition == typeof(Tuple<,>) || genericTypeDefinition == typeof(Tuple<,,>) || genericTypeDefinition == typeof(Tuple<,,,>) || genericTypeDefinition == typeof(Tuple<,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,,,>); } private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences) { Type[] genericArgs = keyValuePairType.GetGenericArguments(); Type typeK = genericArgs[0]; Type typeV = genericArgs[1]; ObjectGenerator objectGenerator = new ObjectGenerator(); object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences); object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences); if (keyObject == null && valueObject == null) { // Failed to create key and values return null; } object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject); return result; } private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences) { Type type = arrayType.GetElementType(); Array result = Array.CreateInstance(type, size); bool areAllElementsNull = true; ObjectGenerator 
objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object element = objectGenerator.GenerateObject(type, createdObjectReferences); result.SetValue(element, i); areAllElementsNull &= element == null; } if (areAllElementsNull) { return null; } return result; } private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences) { Type typeK = typeof(object); Type typeV = typeof(object); if (dictionaryType.IsGenericType) { Type[] genericArgs = dictionaryType.GetGenericArguments(); typeK = genericArgs[0]; typeV = genericArgs[1]; } object result = Activator.CreateInstance(dictionaryType); MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd"); MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey"); ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences); if (newKey == null) { // Cannot generate a valid key return null; } bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey }); if (!containsKey) { object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences); addMethod.Invoke(result, new object[] { newKey, newValue }); } } return result; } private static object GenerateEnum(Type enumType) { Array possibleValues = Enum.GetValues(enumType); if (possibleValues.Length > 0) { return possibleValues.GetValue(0); } return null; } private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences) { bool isGeneric = queryableType.IsGenericType; object list; if (isGeneric) { Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments()); list = GenerateCollection(listType, size, createdObjectReferences); } else { list = GenerateArray(typeof(object[]), size, createdObjectReferences); } if (list == null) { return null; } if (isGeneric) { Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments()); MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType }); return asQueryableMethod.Invoke(null, new[] { list }); } return Queryable.AsQueryable((IEnumerable)list); } private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences) { Type type = collectionType.IsGenericType ? 
collectionType.GetGenericArguments()[0] : typeof(object); object result = Activator.CreateInstance(collectionType); MethodInfo addMethod = collectionType.GetMethod("Add"); bool areAllElementsNull = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object element = objectGenerator.GenerateObject(type, createdObjectReferences); addMethod.Invoke(result, new object[] { element }); areAllElementsNull &= element == null; } if (areAllElementsNull) { return null; } return result; } private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences) { Type type = nullableType.GetGenericArguments()[0]; ObjectGenerator objectGenerator = new ObjectGenerator(); return objectGenerator.GenerateObject(type, createdObjectReferences); } private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences) { object result = null; if (createdObjectReferences.TryGetValue(type, out result)) { // The object has been created already, just return it. This will handle the circular reference case. return result; } if (type.IsValueType) { result = Activator.CreateInstance(type); } else { ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes); if (defaultCtor == null) { // Cannot instantiate the type because it doesn't have a default constructor return null; } result = defaultCtor.Invoke(new object[0]); } createdObjectReferences.Add(type, result); SetPublicProperties(type, result, createdObjectReferences); SetPublicFields(type, result, createdObjectReferences); return result; } private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences) { PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance); ObjectGenerator objectGenerator = new ObjectGenerator(); foreach (PropertyInfo property in properties) { if (property.CanWrite) { object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences); property.SetValue(obj, propertyValue, null); } } } private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences) { FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance); ObjectGenerator objectGenerator = new ObjectGenerator(); foreach (FieldInfo field in fields) { object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences); field.SetValue(obj, fieldValue); } } private class SimpleTypeObjectGenerator { private long _index = 0; private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators(); [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")] private static Dictionary<Type, Func<long, object>> InitializeGenerators() { return new Dictionary<Type, Func<long, object>> { { typeof(Boolean), index => true }, { typeof(Byte), index => (Byte)64 }, { typeof(Char), index => (Char)65 }, { typeof(DateTime), index => DateTime.Now }, { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) }, { typeof(DBNull), index => DBNull.Value }, { typeof(Decimal), index => (Decimal)index }, { typeof(Double), index => (Double)(index + 0.1) }, { typeof(Guid), index => Guid.NewGuid() }, { typeof(Int16), index => (Int16)(index % Int16.MaxValue) }, { typeof(Int32), index => (Int32)(index % Int32.MaxValue) }, { typeof(Int64), index => (Int64)index }, { 
typeof(Object), index => new object() }, { typeof(SByte), index => (SByte)64 }, { typeof(Single), index => (Single)(index + 0.1) }, { typeof(String), index => { return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index); } }, { typeof(TimeSpan), index => { return TimeSpan.FromTicks(1234567); } }, { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) }, { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) }, { typeof(UInt64), index => (UInt64)index }, { typeof(Uri), index => { return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index)); } }, }; } public static bool CanGenerateObject(Type type) { return DefaultGenerators.ContainsKey(type); } public object GenerateObject(Type type) { return DefaultGenerators[type](++_index); } } } }
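// Hedged usage sketch for the ObjectGenerator above: generates sample instances for a
// hypothetical POCO and for a supported generic collection type. The Product type and
// its property names are illustrative only; they are not part of the help-page code.
using System;
using System.Collections.Generic;
using SwaggerDemoApi.Areas.HelpPage;

public class Product
{
    public int Id { get; set; }
    public string Name { get; set; }
    public DateTime Created { get; set; }
}

internal static class ObjectGeneratorExample
{
    public static void Run()
    {
        var generator = new ObjectGenerator();

        // Complex (POCO) type: needs a public default constructor and settable members.
        var product = (Product)generator.GenerateObject(typeof(Product));
        Console.WriteLine("{0}: {1}", product.Id, product.Name);   // e.g. "1: sample string 2"

        // Collection interface: materialized as List<T> with DefaultCollectionSize elements.
        var list = (IList<Product>)generator.GenerateObject(typeof(IList<Product>));
        Console.WriteLine(list.Count);                              // 2
    }
}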
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Reflection; using System.Runtime.CompilerServices; namespace System { internal sealed partial class RuntimeType : TypeInfo, ICloneable { public override Assembly Assembly => RuntimeTypeHandle.GetAssembly(this); public override Type? BaseType => GetBaseType(); public override bool IsByRefLike => RuntimeTypeHandle.IsByRefLike(this); public override bool IsConstructedGenericType => IsGenericType && !IsGenericTypeDefinition; public override bool IsGenericType => RuntimeTypeHandle.HasInstantiation(this); public override bool IsGenericTypeDefinition => RuntimeTypeHandle.IsGenericTypeDefinition(this); public override bool IsGenericParameter => RuntimeTypeHandle.IsGenericVariable(this); public override bool IsTypeDefinition => RuntimeTypeHandle.IsTypeDefinition(this); public override bool IsSecurityCritical => true; public override bool IsSecuritySafeCritical => false; public override bool IsSecurityTransparent => false; public override MemberTypes MemberType => (IsPublic || IsNotPublic) ? MemberTypes.TypeInfo : MemberTypes.NestedType; public override int MetadataToken => RuntimeTypeHandle.GetToken(this); public override Module Module => GetRuntimeModule(); public override Type? ReflectedType => DeclaringType; public override RuntimeTypeHandle TypeHandle => new RuntimeTypeHandle(this); public override Type UnderlyingSystemType => this; public object Clone() => this; public override bool Equals(object? obj) { // ComObjects are identified by the instance of the Type object and not the TypeHandle. return obj == (object)this; } public override int GetArrayRank() { if (!IsArrayImpl()) throw new ArgumentException(SR.Argument_HasToBeArrayClass); return RuntimeTypeHandle.GetArrayRank(this); } protected override TypeAttributes GetAttributeFlagsImpl() => RuntimeTypeHandle.GetAttributes(this); public override object[] GetCustomAttributes(bool inherit) { return CustomAttribute.GetCustomAttributes(this, ObjectType, inherit); } public override object[] GetCustomAttributes(Type attributeType, bool inherit) { if (attributeType is null) throw new ArgumentNullException(nameof(attributeType)); RuntimeType? attributeRuntimeType = attributeType.UnderlyingSystemType as RuntimeType; if (attributeRuntimeType == null) throw new ArgumentException(SR.Arg_MustBeType, nameof(attributeType)); return CustomAttribute.GetCustomAttributes(this, attributeRuntimeType, inherit); } public override IList<CustomAttributeData> GetCustomAttributesData() { return CustomAttributeData.GetCustomAttributesInternal(this); } // GetDefaultMembers // This will return a MemberInfo that has been marked with the [DefaultMemberAttribute] public override MemberInfo[] GetDefaultMembers() { // See if we have cached the default member name MemberInfo[] members = null!; string? defaultMemberName = GetDefaultMemberName(); if (defaultMemberName != null) { members = GetMember(defaultMemberName); } return members ?? Array.Empty<MemberInfo>(); } public override Type GetElementType() => RuntimeTypeHandle.GetElementType(this); public override string? 
GetEnumName(object value) { if (value == null) throw new ArgumentNullException(nameof(value)); Type valueType = value.GetType(); if (!(valueType.IsEnum || IsIntegerType(valueType))) throw new ArgumentException(SR.Arg_MustBeEnumBaseTypeOrEnum, nameof(value)); ulong ulValue = Enum.ToUInt64(value); return Enum.GetEnumName(this, ulValue); } public override string[] GetEnumNames() { if (!IsEnum) throw new ArgumentException(SR.Arg_MustBeEnum, "enumType"); string[] ret = Enum.InternalGetNames(this); // Make a copy since we can't hand out the same array since users can modify them return new ReadOnlySpan<string>(ret).ToArray(); } public override Array GetEnumValues() { if (!IsEnum) throw new ArgumentException(SR.Arg_MustBeEnum, "enumType"); // Get all of the values ulong[] values = Enum.InternalGetValues(this); // Create a generic Array Array ret = Array.CreateInstance(this, values.Length); for (int i = 0; i < values.Length; i++) { object val = Enum.ToObject(this, values[i]); ret.SetValue(val, i); } return ret; } public override Type GetEnumUnderlyingType() { if (!IsEnum) throw new ArgumentException(SR.Arg_MustBeEnum, "enumType"); return Enum.InternalGetUnderlyingType(this); } public override Type GetGenericTypeDefinition() { if (!IsGenericType) throw new InvalidOperationException(SR.InvalidOperation_NotGenericType); return RuntimeTypeHandle.GetGenericTypeDefinition(this); } public override int GetHashCode() => RuntimeHelpers.GetHashCode(this); internal RuntimeModule GetRuntimeModule() => RuntimeTypeHandle.GetModule(this); protected override TypeCode GetTypeCodeImpl() { TypeCode typeCode = Cache.TypeCode; if (typeCode != TypeCode.Empty) return typeCode; CorElementType corElementType = RuntimeTypeHandle.GetCorElementType(this); switch (corElementType) { case CorElementType.ELEMENT_TYPE_BOOLEAN: typeCode = TypeCode.Boolean; break; case CorElementType.ELEMENT_TYPE_CHAR: typeCode = TypeCode.Char; break; case CorElementType.ELEMENT_TYPE_I1: typeCode = TypeCode.SByte; break; case CorElementType.ELEMENT_TYPE_U1: typeCode = TypeCode.Byte; break; case CorElementType.ELEMENT_TYPE_I2: typeCode = TypeCode.Int16; break; case CorElementType.ELEMENT_TYPE_U2: typeCode = TypeCode.UInt16; break; case CorElementType.ELEMENT_TYPE_I4: typeCode = TypeCode.Int32; break; case CorElementType.ELEMENT_TYPE_U4: typeCode = TypeCode.UInt32; break; case CorElementType.ELEMENT_TYPE_I8: typeCode = TypeCode.Int64; break; case CorElementType.ELEMENT_TYPE_U8: typeCode = TypeCode.UInt64; break; case CorElementType.ELEMENT_TYPE_R4: typeCode = TypeCode.Single; break; case CorElementType.ELEMENT_TYPE_R8: typeCode = TypeCode.Double; break; case CorElementType.ELEMENT_TYPE_STRING: typeCode = TypeCode.String; break; case CorElementType.ELEMENT_TYPE_VALUETYPE: if (this == Convert.ConvertTypes[(int)TypeCode.Decimal]) typeCode = TypeCode.Decimal; else if (this == Convert.ConvertTypes[(int)TypeCode.DateTime]) typeCode = TypeCode.DateTime; else if (IsEnum) typeCode = GetTypeCode(Enum.GetUnderlyingType(this)); else typeCode = TypeCode.Object; break; default: if (this == Convert.ConvertTypes[(int)TypeCode.DBNull]) typeCode = TypeCode.DBNull; else if (this == Convert.ConvertTypes[(int)TypeCode.String]) typeCode = TypeCode.String; else typeCode = TypeCode.Object; break; } Cache.TypeCode = typeCode; return typeCode; } protected override bool HasElementTypeImpl() => RuntimeTypeHandle.HasElementType(this); protected override bool IsArrayImpl() => RuntimeTypeHandle.IsArray(this); protected override bool IsContextfulImpl() => false; public override bool 
IsDefined(Type attributeType, bool inherit) { if (attributeType is null) throw new ArgumentNullException(nameof(attributeType)); RuntimeType? attributeRuntimeType = attributeType.UnderlyingSystemType as RuntimeType; if (attributeRuntimeType == null) throw new ArgumentException(SR.Arg_MustBeType, nameof(attributeType)); return CustomAttribute.IsDefined(this, attributeRuntimeType, inherit); } public override bool IsEnumDefined(object value) { if (value == null) throw new ArgumentNullException(nameof(value)); // Check if both of them are of the same type RuntimeType valueType = (RuntimeType)value.GetType(); // If the value is an Enum then we need to extract the underlying value from it if (valueType.IsEnum) { if (!valueType.IsEquivalentTo(this)) throw new ArgumentException(SR.Format(SR.Arg_EnumAndObjectMustBeSameType, valueType, this)); valueType = (RuntimeType)valueType.GetEnumUnderlyingType(); } // If a string is passed in if (valueType == StringType) { // Get all of the Fields, calling GetHashEntry directly to avoid copying string[] names = Enum.InternalGetNames(this); return Array.IndexOf(names, value) >= 0; } // If an enum or integer value is passed in if (IsIntegerType(valueType)) { RuntimeType underlyingType = Enum.InternalGetUnderlyingType(this); if (underlyingType != valueType) throw new ArgumentException(SR.Format(SR.Arg_EnumUnderlyingTypeAndObjectMustBeSameType, valueType, underlyingType)); ulong[] ulValues = Enum.InternalGetValues(this); ulong ulValue = Enum.ToUInt64(value); return Array.BinarySearch(ulValues, ulValue) >= 0; } else { throw new InvalidOperationException(SR.InvalidOperation_UnknownEnumType); } } protected override bool IsValueTypeImpl() { // We need to return true for generic parameters with the ValueType constraint. // So we cannot use the faster RuntimeTypeHandle.IsValueType because it returns // false for all generic parameters. if (this == typeof(ValueType) || this == typeof(Enum)) return false; return IsSubclassOf(typeof(ValueType)); } protected override bool IsByRefImpl() => RuntimeTypeHandle.IsByRef(this); protected override bool IsPrimitiveImpl() => RuntimeTypeHandle.IsPrimitive(this); protected override bool IsPointerImpl() => RuntimeTypeHandle.IsPointer(this); protected override bool IsCOMObjectImpl() => RuntimeTypeHandle.IsComObject(this, false); public override bool IsInstanceOfType(object? o) => RuntimeTypeHandle.IsInstanceOfType(this, o); public override bool IsAssignableFrom(TypeInfo? typeInfo) { if (typeInfo == null) return false; return IsAssignableFrom(typeInfo.AsType()); } public override bool IsAssignableFrom(Type? c) { if (c is null) return false; if (ReferenceEquals(c, this)) return true; // For runtime type, let the VM decide. if (c.UnderlyingSystemType is RuntimeType fromType) { // both this and c (or their underlying system types) are runtime types return RuntimeTypeHandle.CanCastTo(fromType, this); } // Special case for TypeBuilder to be backward-compatible. if (c is System.Reflection.Emit.TypeBuilder) { // If c is a subclass of this class, then c can be cast to this type. if (c.IsSubclassOf(this)) return true; if (IsInterface) { return c.ImplementInterface(this); } else if (IsGenericParameter) { Type[] constraints = GetGenericParameterConstraints(); for (int i = 0; i < constraints.Length; i++) if (!constraints[i].IsAssignableFrom(c)) return false; return true; } } // For anything else we return false. return false; } private RuntimeType? 
GetBaseType() { if (IsInterface) return null; if (RuntimeTypeHandle.IsGenericVariable(this)) { Type[] constraints = GetGenericParameterConstraints(); RuntimeType baseType = ObjectType; for (int i = 0; i < constraints.Length; i++) { RuntimeType constraint = (RuntimeType)constraints[i]; if (constraint.IsInterface) continue; if (constraint.IsGenericParameter) { GenericParameterAttributes special = constraint.GenericParameterAttributes & GenericParameterAttributes.SpecialConstraintMask; if ((special & GenericParameterAttributes.ReferenceTypeConstraint) == 0 && (special & GenericParameterAttributes.NotNullableValueTypeConstraint) == 0) continue; } baseType = constraint; } if (baseType == ObjectType) { GenericParameterAttributes special = GenericParameterAttributes & GenericParameterAttributes.SpecialConstraintMask; if ((special & GenericParameterAttributes.NotNullableValueTypeConstraint) != 0) baseType = ValueType; } return baseType; } return RuntimeTypeHandle.GetBaseType(this); } } }
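// Hedged illustration: RuntimeType is internal, but its members back the public
// System.Type surface for runtime types. This sketch exercises a few of the code
// paths shown above (enum helpers, type codes, assignability) through that surface.
using System;

enum Color { Red, Green, Blue }

internal static class RuntimeTypeExample
{
    public static void Run()
    {
        Type t = typeof(Color);                                  // a RuntimeType at run time

        Console.WriteLine(t.GetEnumUnderlyingType());            // System.Int32 (GetEnumUnderlyingType)
        Console.WriteLine(string.Join(",", t.GetEnumNames()));   // Red,Green,Blue (GetEnumNames)
        Console.WriteLine(Type.GetTypeCode(t));                  // Int32 for enums (GetTypeCodeImpl)

        // Assignability between two runtime types goes through RuntimeTypeHandle.CanCastTo.
        Console.WriteLine(typeof(Enum).IsAssignableFrom(t));     // True
        Console.WriteLine(typeof(ValueType).IsValueType);        // False (special-cased in IsValueTypeImpl)
    }
}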
// UInt16Test.cs - NUnit Test Cases for the System.UInt16 struct // // Mario Martinez ([email protected]) // // (C) Ximian, Inc. http://www.ximian.com // using NUnit.Framework; using System; using System.Threading; using System.Globalization; namespace MonoTests.System { [TestFixture] public class UInt16Test : Assertion { private const UInt16 MyUInt16_1 = 42; private const UInt16 MyUInt16_2 = 0; private const UInt16 MyUInt16_3 = 65535; private const string MyString1 = "42"; private const string MyString2 = "0"; private const string MyString3 = "65535"; private string[] Formats1 = {"c", "d", "e", "f", "g", "n", "p", "x" }; private string[] Formats2 = {"c5", "d5", "e5", "f5", "g5", "n5", "p5", "x5" }; private string[] Results1 = {null, "0", "0.000000e+000", "0.00", "0", "0.00", "0.00 %", "0"}; private string[] Results2 = {null, "65535", "6.55350e+004", "65535.00000", "65535", "65,535.00000", "6,553,500.00000 %", "0ffff"}; private string[] ResultsNfi1 = {NumberFormatInfo.InvariantInfo.CurrencySymbol+"0.00", "0", "0.000000e+000", "0.00", "0", "0.00", "0.00 %", "0"}; private string[] ResultsNfi2 = {NumberFormatInfo.InvariantInfo.CurrencySymbol+"65,535.00000", "65535", "6.55350e+004", "65535.00000", "65535", "65,535.00000", "6,553,500.00000 %", "0ffff"}; private NumberFormatInfo Nfi = NumberFormatInfo.InvariantInfo; private CultureInfo old_culture; [TestFixtureSetUp] public void SetUp () { old_culture = Thread.CurrentThread.CurrentCulture; // Set culture to en-US and don't let the user override. Thread.CurrentThread.CurrentCulture = new CultureInfo ("en-US", false); string decimals = new String ('0', NumberFormatInfo.CurrentInfo.NumberDecimalDigits); string perPattern = new string[] {"n %","n%","%n"} [NumberFormatInfo.CurrentInfo.PercentPositivePattern]; Results1 [0] = NumberFormatInfo.CurrentInfo.CurrencySymbol + "0.00"; Results1 [3] = "0." + decimals; Results1 [5] = "0." 
+ decimals; Results1 [6] = perPattern.Replace ("n","0.00"); Results2 [0] = NumberFormatInfo.CurrentInfo.CurrencySymbol + "65,535.00000"; Results2 [6] = perPattern.Replace ("n","6,553,500.00000"); } [TestFixtureTearDown] public void TearDown () { Thread.CurrentThread.CurrentCulture = old_culture; } public void TestMinMax() { AssertEquals(UInt16.MinValue, MyUInt16_2); AssertEquals(UInt16.MaxValue, MyUInt16_3); } public void TestCompareTo() { Assert(MyUInt16_3.CompareTo(MyUInt16_2) > 0); Assert(MyUInt16_2.CompareTo(MyUInt16_2) == 0); Assert(MyUInt16_1.CompareTo((UInt16)(42)) == 0); Assert(MyUInt16_2.CompareTo(MyUInt16_3) < 0); try { MyUInt16_2.CompareTo((object)100); Fail("Should raise a System.ArgumentException"); } catch (Exception e) { Assert(typeof(ArgumentException) == e.GetType()); } } public void TestEquals() { Assert(MyUInt16_1.Equals(MyUInt16_1)); Assert(MyUInt16_1.Equals((object)(UInt16)(42))); Assert(MyUInt16_1.Equals((object)(SByte)(42)) == false); Assert(MyUInt16_1.Equals(MyUInt16_2) == false); } public void TestGetHashCode() { try { MyUInt16_1.GetHashCode(); MyUInt16_2.GetHashCode(); MyUInt16_3.GetHashCode(); } catch { Fail("GetHashCode should not raise an exception here"); } } public void TestParse() { //test Parse(string s) Assert(MyUInt16_1 == UInt16.Parse(MyString1)); Assert(MyUInt16_2 == UInt16.Parse(MyString2)); Assert(MyUInt16_3 == UInt16.Parse(MyString3)); try { UInt16.Parse(null); Fail("Should raise a System.ArgumentNullException"); } catch (Exception e) { Assert(typeof(ArgumentNullException) == e.GetType()); } try { UInt16.Parse("not-a-number"); Fail("Should raise a System.FormatException"); } catch (Exception e) { Assert(typeof(FormatException) == e.GetType()); } try { int OverInt = UInt16.MaxValue + 1; UInt16.Parse(OverInt.ToString()); Fail("Should raise a System.OverflowException"); } catch (Exception e) { Assert(typeof(OverflowException) == e.GetType()); } //test Parse(string s, NumberStyles style) Assert(42 == UInt16.Parse(" "+NumberFormatInfo.CurrentInfo.CurrencySymbol+"42 ", NumberStyles.Currency)); try { UInt16.Parse("$42", NumberStyles.Integer); Fail("Should raise a System.FormatException"); } catch (Exception e) { Assert(typeof(FormatException) == e.GetType()); } //test Parse(string s, IFormatProvider provider) Assert(42 == UInt16.Parse(" 42 ", Nfi)); try { UInt16.Parse("%42", Nfi); Fail("Should raise a System.FormatException"); } catch (Exception e) { Assert(typeof(FormatException) == e.GetType()); } //test Parse(string s, NumberStyles style, IFormatProvider provider) Assert(16 == UInt16.Parse(" 10 ", NumberStyles.HexNumber, Nfi)); try { UInt16.Parse("$42", NumberStyles.Integer, Nfi); Fail("Should raise a System.FormatException"); } catch (Exception e) { Assert(typeof(FormatException) == e.GetType()); } } public void TestToString() { //test ToString() AssertEquals("A1", MyString1, MyUInt16_1.ToString()); AssertEquals("A2", MyString2, MyUInt16_2.ToString()); AssertEquals("A3", MyString3, MyUInt16_3.ToString()); //test ToString(string format) for (int i=0; i < Formats1.Length; i++) { Console.WriteLine ("d:" + NumberFormatInfo.CurrentInfo.NumberDecimalDigits); AssertEquals("A4:"+i.ToString(), Results1[i], MyUInt16_2.ToString(Formats1[i])); AssertEquals("A5:"+i.ToString(), Results2[i], MyUInt16_3.ToString(Formats2[i])); } //test ToString(string format, IFormatProvider provider); for (int i=0; i < Formats1.Length; i++) { AssertEquals("A6:"+i.ToString(), ResultsNfi1[i], MyUInt16_2.ToString(Formats1[i], Nfi)); AssertEquals("A7:"+i.ToString(), ResultsNfi2[i], 
MyUInt16_3.ToString(Formats2[i], Nfi)); } try { MyUInt16_1.ToString("z"); Fail("Should raise a System.FormatException"); } catch (Exception e) { Assert("A8", typeof(FormatException) == e.GetType()); } } [Test] public void ToString_Defaults () { UInt16 i = 254; // everything defaults to "G" string def = i.ToString ("G"); AssertEquals ("ToString()", def, i.ToString ()); AssertEquals ("ToString((IFormatProvider)null)", def, i.ToString ((IFormatProvider)null)); AssertEquals ("ToString((string)null)", def, i.ToString ((string)null)); AssertEquals ("ToString(empty)", def, i.ToString (String.Empty)); AssertEquals ("ToString(null,null)", def, i.ToString (null, null)); AssertEquals ("ToString(empty,null)", def, i.ToString (String.Empty, null)); AssertEquals ("ToString(G)", "254", def); } } }
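// Hedged sketch of the behavior the fixture above pins down: UInt16 formatting and
// parsing are culture sensitive, which is why SetUp forces en-US and rebuilds the
// expected strings from NumberFormatInfo.CurrentInfo. Here the invariant culture is
// used explicitly instead.
using System;
using System.Globalization;

internal static class UInt16FormattingExample
{
    public static void Run()
    {
        UInt16 value = 65535;
        NumberFormatInfo inv = NumberFormatInfo.InvariantInfo;

        Console.WriteLine(value.ToString("n5", inv));   // 65,535.00000
        Console.WriteLine(value.ToString("x5", inv));   // 0ffff

        // Hex digits and currency symbols are only accepted with the matching NumberStyles.
        Console.WriteLine(UInt16.Parse(" 10 ", NumberStyles.HexNumber, inv));                  // 16
        Console.WriteLine(UInt16.Parse(inv.CurrencySymbol + "42", NumberStyles.Currency, inv)); // 42
    }
}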
using System; using System.Collections.Generic; using ModestTree; #if !NOT_UNITY3D using UnityEngine; #endif namespace Zenject { public class FactoryFromBinderBase : ArgConditionCopyNonLazyBinder { public FactoryFromBinderBase( DiContainer bindContainer, Type contractType, BindInfo bindInfo, FactoryBindInfo factoryBindInfo) : base(bindInfo) { FactoryBindInfo = factoryBindInfo; BindContainer = bindContainer; ContractType = contractType; factoryBindInfo.ProviderFunc = (container) => new TransientProvider( ContractType, container, BindInfo.Arguments, BindInfo.ContextInfo, BindInfo.ConcreteIdentifier); } protected DiContainer BindContainer { get; private set; } protected FactoryBindInfo FactoryBindInfo { get; private set; } protected Func<DiContainer, IProvider> ProviderFunc { get { return FactoryBindInfo.ProviderFunc; } set { FactoryBindInfo.ProviderFunc = value; } } protected Type ContractType { get; private set; } public IEnumerable<Type> AllParentTypes { get { yield return ContractType; foreach (var type in BindInfo.ToTypes) { yield return type; } } } // Note that this isn't necessary to call since it's the default public ConditionCopyNonLazyBinder FromNew() { BindingUtil.AssertIsNotComponent(ContractType); BindingUtil.AssertIsNotAbstract(ContractType); return this; } public ConditionCopyNonLazyBinder FromResolve() { return FromResolve(null); } public ConditionCopyNonLazyBinder FromInstance(object instance) { BindingUtil.AssertInstanceDerivesFromOrEqual(instance, AllParentTypes); ProviderFunc = (container) => new InstanceProvider(ContractType, instance, container); return this; } public ConditionCopyNonLazyBinder FromResolve(object subIdentifier) { ProviderFunc = (container) => new ResolveProvider( ContractType, container, subIdentifier, false, InjectSources.Any, false); return this; } protected ConcreteBinderGeneric<T> CreateIFactoryBinder<T>(out Guid factoryId) { // Use a random ID so that our provider is the only one that can find it and so it doesn't // conflict with anything else factoryId = Guid.NewGuid(); // Very important here that we use NoFlush otherwise the main binding will be finalized early return BindContainer.BindNoFlush<T>().WithId(factoryId); } #if !NOT_UNITY3D public ConditionCopyNonLazyBinder FromComponentOn(GameObject gameObject) { BindingUtil.AssertIsValidGameObject(gameObject); BindingUtil.AssertIsComponent(ContractType); BindingUtil.AssertIsNotAbstract(ContractType); ProviderFunc = (container) => new GetFromGameObjectComponentProvider( ContractType, gameObject, true); return this; } public ConditionCopyNonLazyBinder FromComponentOn(Func<InjectContext, GameObject> gameObjectGetter) { BindingUtil.AssertIsComponent(ContractType); BindingUtil.AssertIsNotAbstract(ContractType); ProviderFunc = (container) => new GetFromGameObjectGetterComponentProvider( ContractType, gameObjectGetter, true); return this; } public ConditionCopyNonLazyBinder FromComponentOnRoot() { return FromComponentOn( ctx => BindContainer.Resolve<Context>().gameObject); } public ConditionCopyNonLazyBinder FromNewComponentOn(GameObject gameObject) { BindingUtil.AssertIsValidGameObject(gameObject); BindingUtil.AssertIsComponent(ContractType); BindingUtil.AssertIsNotAbstract(ContractType); ProviderFunc = (container) => new AddToExistingGameObjectComponentProvider( gameObject, container, ContractType, new List<TypeValuePair>(), BindInfo.ConcreteIdentifier); return this; } public ConditionCopyNonLazyBinder FromNewComponentOn( Func<InjectContext, GameObject> gameObjectGetter) { 
BindingUtil.AssertIsComponent(ContractType); BindingUtil.AssertIsNotAbstract(ContractType); ProviderFunc = (container) => new AddToExistingGameObjectComponentProviderGetter( gameObjectGetter, container, ContractType, new List<TypeValuePair>(), BindInfo.ConcreteIdentifier); return this; } public NameTransformConditionCopyNonLazyBinder FromNewComponentOnNewGameObject() { BindingUtil.AssertIsComponent(ContractType); BindingUtil.AssertIsNotAbstract(ContractType); var gameObjectInfo = new GameObjectCreationParameters(); ProviderFunc = (container) => new AddToNewGameObjectComponentProvider( container, ContractType, new List<TypeValuePair>(), gameObjectInfo, BindInfo.ConcreteIdentifier); return new NameTransformConditionCopyNonLazyBinder(BindInfo, gameObjectInfo); } public NameTransformConditionCopyNonLazyBinder FromNewComponentOnNewPrefab(UnityEngine.Object prefab) { BindingUtil.AssertIsValidPrefab(prefab); BindingUtil.AssertIsComponent(ContractType); BindingUtil.AssertIsNotAbstract(ContractType); var gameObjectInfo = new GameObjectCreationParameters(); ProviderFunc = (container) => new InstantiateOnPrefabComponentProvider( ContractType, new PrefabInstantiator( container, gameObjectInfo, ContractType, new List<TypeValuePair>(), new PrefabProvider(prefab))); return new NameTransformConditionCopyNonLazyBinder(BindInfo, gameObjectInfo); } public NameTransformConditionCopyNonLazyBinder FromComponentInNewPrefab(UnityEngine.Object prefab) { BindingUtil.AssertIsValidPrefab(prefab); BindingUtil.AssertIsInterfaceOrComponent(ContractType); var gameObjectInfo = new GameObjectCreationParameters(); ProviderFunc = (container) => new GetFromPrefabComponentProvider( ContractType, new PrefabInstantiator( container, gameObjectInfo, ContractType, new List<TypeValuePair>(), new PrefabProvider(prefab)), true); return new NameTransformConditionCopyNonLazyBinder(BindInfo, gameObjectInfo); } public NameTransformConditionCopyNonLazyBinder FromComponentInNewPrefabResource(string resourcePath) { BindingUtil.AssertIsValidResourcePath(resourcePath); BindingUtil.AssertIsInterfaceOrComponent(ContractType); var gameObjectInfo = new GameObjectCreationParameters(); ProviderFunc = (container) => new GetFromPrefabComponentProvider( ContractType, new PrefabInstantiator( container, gameObjectInfo, ContractType, new List<TypeValuePair>(), new PrefabProviderResource(resourcePath)), true); return new NameTransformConditionCopyNonLazyBinder(BindInfo, gameObjectInfo); } public NameTransformConditionCopyNonLazyBinder FromNewComponentOnNewPrefabResource(string resourcePath) { BindingUtil.AssertIsValidResourcePath(resourcePath); BindingUtil.AssertIsComponent(ContractType); BindingUtil.AssertIsNotAbstract(ContractType); var gameObjectInfo = new GameObjectCreationParameters(); ProviderFunc = (container) => new InstantiateOnPrefabComponentProvider( ContractType, new PrefabInstantiator( container, gameObjectInfo, ContractType, new List<TypeValuePair>(), new PrefabProviderResource(resourcePath))); return new NameTransformConditionCopyNonLazyBinder(BindInfo, gameObjectInfo); } public ConditionCopyNonLazyBinder FromNewScriptableObjectResource(string resourcePath) { BindingUtil.AssertIsValidResourcePath(resourcePath); BindingUtil.AssertIsInterfaceOrScriptableObject(ContractType); ProviderFunc = (container) => new ScriptableObjectResourceProvider( resourcePath, ContractType, container, new List<TypeValuePair>(), true, null); return this; } public ConditionCopyNonLazyBinder FromScriptableObjectResource(string resourcePath) { 
BindingUtil.AssertIsValidResourcePath(resourcePath); BindingUtil.AssertIsInterfaceOrScriptableObject(ContractType); ProviderFunc = (container) => new ScriptableObjectResourceProvider( resourcePath, ContractType, container, new List<TypeValuePair>(), false, null); return this; } public ConditionCopyNonLazyBinder FromResource(string resourcePath) { BindingUtil.AssertDerivesFromUnityObject(ContractType); ProviderFunc = (container) => new ResourceProvider(resourcePath, ContractType, true); return this; } #endif } }
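// Hedged usage sketch for the factory "From..." bindings above, written as a Zenject
// installer. Enemy and EnemyInstaller are illustrative; the nested factory base class
// name (Factory<T> in older releases, PlaceholderFactory<T> later) depends on the
// Zenject version, so treat the exact identifiers as assumptions.
using UnityEngine;
using Zenject;

public class Enemy : MonoBehaviour
{
    // Classic nested-factory pattern used with BindFactory.
    public class Factory : Factory<Enemy> { }
}

public class EnemyInstaller : MonoInstaller
{
    public GameObject EnemyPrefab;

    public override void InstallBindings()
    {
        // Construction is deferred to the provider chosen by the From... call,
        // here FromComponentInNewPrefab as defined in FactoryFromBinderBase.
        Container.BindFactory<Enemy, Enemy.Factory>()
            .FromComponentInNewPrefab(EnemyPrefab);
    }
}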
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Text.Utf8; namespace Json.Net.Tests { public struct JsonParseObject { private byte[] _buffer; private int _start; private int _end; public JsonParseObject(byte[] buffer, int start, int end) { _buffer = buffer; _start = start; _end = end; } public bool HasValue() { var typeCode = _buffer[_start + 8]; if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { int length = BitConverter.ToInt32(_buffer, _start + 4); if (length == 0) return false; return true; } else { int location = BitConverter.ToInt32(_buffer, _start); if (_buffer[location - 1] == '"' && _buffer[location + 4] == '"') { return true; } return (_buffer[location] != 'n' || _buffer[location + 1] != 'u' || _buffer[location + 2] != 'l' || _buffer[location + 3] != 'l'); } } public JsonParseObject this[string index] { get { int length = BitConverter.ToInt32(_buffer, _start + 4); var typeCode = _buffer[_start + 8]; if (length == 0) { throw new KeyNotFoundException(); } if (typeCode != (byte)JsonParser.JsonDb.JsonValueType.Object) { throw new NullReferenceException(); } for (int i = _start + RowSize; i <= _end; i += RowSize) { length = BitConverter.ToInt32(_buffer, i + 4); typeCode = _buffer[i + 8]; if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { i += length * RowSize; continue; } int location = BitConverter.ToInt32(_buffer, i); if (isEqual(index, _buffer, location, length)) { int newStart = i + RowSize; int newEnd = newStart + RowSize; typeCode = _buffer[newStart + 8]; if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { length = BitConverter.ToInt32(_buffer, newStart + 4); newEnd = newEnd + RowSize * length; } return new JsonParseObject(_buffer, newStart, newEnd); } typeCode = _buffer[i + RowSize + 8]; if (typeCode != (byte)JsonParser.JsonDb.JsonValueType.Object && typeCode != (byte)JsonParser.JsonDb.JsonValueType.Array) { i += RowSize; } } throw new KeyNotFoundException(); } } private bool isEqual(string str, byte[] buffer, int location, int length) { if (str.Length != length) return false; for (int i = 0; i < length; i++) { if (str[i] != buffer[location + i]) { return false; } } return true; } public JsonParseObject this[int index] { get { int length = BitConverter.ToInt32(_buffer, _start + 4); var typeCode = _buffer[_start + 8]; if (index < 0 || index >= length) { throw new IndexOutOfRangeException(); } if (typeCode != (byte)JsonParser.JsonDb.JsonValueType.Array) { throw new NullReferenceException(); } int counter = 0; for (int i = _start + RowSize; i <= _end; i += RowSize) { typeCode = _buffer[i + 8]; if (index == counter) { int newStart = i; int newEnd = i + RowSize; if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { length = BitConverter.ToInt32(_buffer, i + 4); newEnd = newEnd + RowSize * length; } return new JsonParseObject(_buffer, newStart, newEnd); } if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { length = BitConverter.ToInt32(_buffer, i + 4); i += length * RowSize; } counter++; } throw new 
IndexOutOfRangeException(); } } public static explicit operator string (JsonParseObject json) { return GetUtf8Span(json).ToString(); } public static explicit operator Utf8Span(JsonParseObject json) { return GetUtf8Span(json); } private static Utf8Span GetUtf8Span(JsonParseObject json) { var typeCode = json._buffer[json._start + 8]; if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { throw new InvalidCastException(); } int location = BitConverter.ToInt32(json._buffer, json._start); int length = BitConverter.ToInt32(json._buffer, json._start + 4); return new Utf8Span(json._buffer, location, length); } public static explicit operator bool (JsonParseObject json) { var typeCode = json._buffer[json._start + 8]; if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { throw new InvalidCastException(); } int length = BitConverter.ToInt32(json._buffer, json._start + 4); if (length < 4 || length > 5) { throw new InvalidCastException(); } int location = BitConverter.ToInt32(json._buffer, json._start); bool isTrue = json._buffer[location] == 't' && json._buffer[location + 1] == 'r' && json._buffer[location + 2] == 'u' && json._buffer[location + 3] == 'e'; bool isFalse = json._buffer[location] == 'f' && json._buffer[location + 1] == 'a' && json._buffer[location + 2] == 'l' && json._buffer[location + 3] == 's' && json._buffer[location + 4] == 'e'; if (isTrue) { return true; } else if (isFalse) { return false; } else { throw new InvalidCastException(); } } public static explicit operator int (JsonParseObject json) { var typeCode = json._buffer[json._start + 8]; if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { throw new InvalidCastException(); } int location = BitConverter.ToInt32(json._buffer, json._start); int length = BitConverter.ToInt32(json._buffer, json._start + 4); int count = location; bool isNegative = false; var nextByte = json._buffer[count]; if (nextByte == '-') { isNegative = true; count++; } int result = 0; while (count - location < length) { nextByte = json._buffer[count]; if (nextByte < '0' || nextByte > '9') { throw new InvalidCastException(); // return isNegative ? result * -1 : result; } int digit = nextByte - '0'; result = result * 10 + digit; count++; } return isNegative ? 
result * -1 : result; } public static explicit operator double (JsonParseObject json) { var typeCode = json._buffer[json._start + 8]; if (typeCode == (byte)JsonParser.JsonDb.JsonValueType.Object || typeCode == (byte)JsonParser.JsonDb.JsonValueType.Array) { throw new InvalidCastException(); } int location = BitConverter.ToInt32(json._buffer, json._start); int length = BitConverter.ToInt32(json._buffer, json._start + 4); int count = location; bool isNegative = false; var nextByte = json._buffer[count]; if (nextByte == '-') { isNegative = true; count++; nextByte = json._buffer[count]; } if (nextByte < '0' || nextByte > '9' || count - location >= length) { throw new InvalidCastException(); } int integerPart = 0; while (nextByte >= '0' && nextByte <= '9' && count - location < length) { int digit = nextByte - '0'; integerPart = integerPart * 10 + digit; count++; nextByte = json._buffer[count]; } double result = integerPart; int decimalPart = 0; if (nextByte == '.') { count++; int numberOfDigits = count; nextByte = json._buffer[count]; while (nextByte >= '0' && nextByte <= '9' && count - location < length) { int digit = nextByte - '0'; decimalPart = decimalPart * 10 + digit; count++; nextByte = json._buffer[count]; } numberOfDigits = count - numberOfDigits; double divisor = Math.Pow(10, numberOfDigits); result += decimalPart / divisor; } int exponentPart = 0; bool isExpNegative = false; if (nextByte == 'e' || nextByte == 'E') { count++; nextByte = json._buffer[count]; if (nextByte == '-' || nextByte == '+') { if (nextByte == '-') { isExpNegative = true; } count++; } nextByte = json._buffer[count]; while (nextByte >= '0' && nextByte <= '9' && count - location < length) { int digit = nextByte - '0'; exponentPart = exponentPart * 10 + digit; count++; nextByte = json._buffer[count]; } result *= (Math.Pow(10, isExpNegative ? exponentPart * -1 : exponentPart)); } if (count - location > length) { throw new InvalidCastException(); } return isNegative ? result * -1 : result; } } }
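// Hedged usage sketch for JsonParseObject: it assumes some earlier step (the
// repository's JsonParser, not shown here) has already produced a JsonParseObject
// over its index/row buffer for a document like {"name":"John","age":30,"scores":[1.5,2.5]}.
// Only the indexers and explicit cast operators defined above are exercised.
using System;
using Json.Net.Tests;

internal static class JsonParseObjectExample
{
    public static void Run(JsonParseObject document)
    {
        // Member access by property name, then explicit casts to CLR primitives.
        string name = (string)document["name"];
        int age = (int)document["age"];

        // Array access by position; numeric elements use the int/double operators above.
        double firstScore = (double)document["scores"][0];

        // HasValue distinguishes null or empty values from present ones.
        bool hasName = document["name"].HasValue();

        Console.WriteLine("{0} ({1}): {2}, present={3}", name, age, firstScore, hasName);
    }
}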
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Collections.Generic; using System.Composition; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CSharp.Extensions; using Microsoft.CodeAnalysis.CSharp.Symbols; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.DocumentationComments; using Microsoft.CodeAnalysis.LanguageServices; using Microsoft.CodeAnalysis.Shared.Extensions; using Microsoft.CodeAnalysis.SignatureHelp; using Microsoft.CodeAnalysis.Text; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.CSharp.SignatureHelp { [ExportSignatureHelpProvider("AttributeSignatureHelpProvider", LanguageNames.CSharp), Shared] internal partial class AttributeSignatureHelpProvider : AbstractCSharpSignatureHelpProvider { public override bool IsTriggerCharacter(char ch) { return ch == '(' || ch == ','; } public override bool IsRetriggerCharacter(char ch) { return ch == ')'; } private bool TryGetAttributeExpression(SyntaxNode root, int position, ISyntaxFactsService syntaxFacts, SignatureHelpTriggerReason triggerReason, CancellationToken cancellationToken, out AttributeSyntax attribute) { if (!CommonSignatureHelpUtilities.TryGetSyntax(root, position, syntaxFacts, triggerReason, IsTriggerToken, IsArgumentListToken, cancellationToken, out attribute)) { return false; } return attribute.ArgumentList != null; } private bool IsTriggerToken(SyntaxToken token) { return !token.IsKind(SyntaxKind.None) && token.ValueText.Length == 1 && IsTriggerCharacter(token.ValueText[0]) && token.Parent is AttributeArgumentListSyntax && token.Parent.Parent is AttributeSyntax; } private static bool IsArgumentListToken(AttributeSyntax expression, SyntaxToken token) { return expression.ArgumentList != null && expression.ArgumentList.Span.Contains(token.SpanStart) && token != expression.ArgumentList.CloseParenToken; } protected override async Task<SignatureHelpItems> GetItemsWorkerAsync(Document document, int position, SignatureHelpTriggerInfo triggerInfo, CancellationToken cancellationToken) { var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false); AttributeSyntax attribute; if (!TryGetAttributeExpression(root, position, document.GetLanguageService<ISyntaxFactsService>(), triggerInfo.TriggerReason, cancellationToken, out attribute)) { return null; } var semanticModel = await document.GetSemanticModelForNodeAsync(attribute, cancellationToken).ConfigureAwait(false); var attributeType = semanticModel.GetTypeInfo(attribute, cancellationToken).Type as INamedTypeSymbol; if (attributeType == null) { return null; } var within = semanticModel.GetEnclosingNamedTypeOrAssembly(position, cancellationToken); if (within == null) { return null; } var symbolDisplayService = document.Project.LanguageServices.GetService<ISymbolDisplayService>(); var accessibleConstructors = attributeType.InstanceConstructors .Where(c => c.IsAccessibleWithin(within)) .FilterToVisibleAndBrowsableSymbols(document.ShouldHideAdvancedMembers(), semanticModel.Compilation) .Sort(symbolDisplayService, semanticModel, attribute.SpanStart); if (!accessibleConstructors.Any()) { return null; } var anonymousTypeDisplayService = document.Project.LanguageServices.GetService<IAnonymousTypeDisplayService>(); var documentationCommentFormatter = 
document.Project.LanguageServices.GetService<IDocumentationCommentFormattingService>(); var textSpan = SignatureHelpUtilities.GetSignatureHelpSpan(attribute.ArgumentList); var syntaxFacts = document.GetLanguageService<ISyntaxFactsService>(); return CreateSignatureHelpItems(accessibleConstructors.Select(c => Convert(c, within, attribute, semanticModel, symbolDisplayService, anonymousTypeDisplayService, documentationCommentFormatter, cancellationToken)).ToList(), textSpan, GetCurrentArgumentState(root, position, syntaxFacts, textSpan, cancellationToken)); } public override SignatureHelpState GetCurrentArgumentState(SyntaxNode root, int position, ISyntaxFactsService syntaxFacts, TextSpan currentSpan, CancellationToken cancellationToken) { AttributeSyntax expression; if (TryGetAttributeExpression(root, position, syntaxFacts, SignatureHelpTriggerReason.InvokeSignatureHelpCommand, cancellationToken, out expression) && currentSpan.Start == SignatureHelpUtilities.GetSignatureHelpSpan(expression.ArgumentList).Start) { return SignatureHelpUtilities.GetSignatureHelpState(expression.ArgumentList, position); } return null; } private SignatureHelpItem Convert( IMethodSymbol constructor, ISymbol within, AttributeSyntax attribute, SemanticModel semanticModel, ISymbolDisplayService symbolDisplayService, IAnonymousTypeDisplayService anonymousTypeDisplayService, IDocumentationCommentFormattingService documentationCommentFormatter, CancellationToken cancellationToken) { var position = attribute.SpanStart; var namedParameters = constructor.ContainingType.GetAttributeNamedParameters(semanticModel.Compilation, within) .OrderBy(s => s.Name) .ToList(); var isVariadic = constructor.Parameters.Length > 0 && constructor.Parameters.Last().IsParams && namedParameters.Count == 0; var item = CreateItem( constructor, semanticModel, position, symbolDisplayService, anonymousTypeDisplayService, isVariadic, constructor.GetDocumentationPartsFactory(semanticModel, position, documentationCommentFormatter), GetPreambleParts(constructor, semanticModel, position), GetSeparatorParts(), GetPostambleParts(constructor), GetParameters(constructor, semanticModel, position, namedParameters, documentationCommentFormatter, cancellationToken)); return item; } private IList<SignatureHelpSymbolParameter> GetParameters( IMethodSymbol constructor, SemanticModel semanticModel, int position, IList<ISymbol> namedParameters, IDocumentationCommentFormattingService documentationCommentFormatter, CancellationToken cancellationToken) { var result = new List<SignatureHelpSymbolParameter>(); foreach (var parameter in constructor.Parameters) { result.Add(Convert(parameter, semanticModel, position, documentationCommentFormatter, cancellationToken)); } for (int i = 0; i < namedParameters.Count; i++) { cancellationToken.ThrowIfCancellationRequested(); var namedParameter = namedParameters[i]; var type = namedParameter is IFieldSymbol ? ((IFieldSymbol)namedParameter).Type : ((IPropertySymbol)namedParameter).Type; var displayParts = new List<SymbolDisplayPart>(); displayParts.Add(new SymbolDisplayPart( namedParameter is IFieldSymbol ? 
SymbolDisplayPartKind.FieldName : SymbolDisplayPartKind.PropertyName, namedParameter, namedParameter.Name.ToIdentifierToken().ToString())); displayParts.Add(Space()); displayParts.Add(Punctuation(SyntaxKind.EqualsToken)); displayParts.Add(Space()); displayParts.AddRange(type.ToMinimalDisplayParts(semanticModel, position)); result.Add(new SignatureHelpSymbolParameter( namedParameter.Name, isOptional: true, documentationFactory: namedParameter.GetDocumentationPartsFactory(semanticModel, position, documentationCommentFormatter), displayParts: displayParts, prefixDisplayParts: GetParameterPrefixDisplayParts(i))); } return result; } private static List<SymbolDisplayPart> GetParameterPrefixDisplayParts(int i) { if (i == 0) { return new List<SymbolDisplayPart> { new SymbolDisplayPart(SymbolDisplayPartKind.Text, null, CSharpFeaturesResources.Properties), Punctuation(SyntaxKind.ColonToken), Space() }; } return null; } private IList<SymbolDisplayPart> GetPreambleParts( IMethodSymbol method, SemanticModel semanticModel, int position) { var result = new List<SymbolDisplayPart>(); result.AddRange(method.ContainingType.ToMinimalDisplayParts(semanticModel, position)); result.Add(Punctuation(SyntaxKind.OpenParenToken)); return result; } private IList<SymbolDisplayPart> GetPostambleParts(IMethodSymbol method) { return SpecializedCollections.SingletonList( Punctuation(SyntaxKind.CloseParenToken)); } } }
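// Hedged illustration of what the provider above reacts to: signature help is
// (re)triggered by '(' and ',' inside an attribute argument list, and each item lists
// the constructor parameters followed by settable named properties/fields
// (the "Properties:" group built in GetParameters). SampleAttribute is illustrative.
using System;

[AttributeUsage(AttributeTargets.Class)]
public sealed class SampleAttribute : Attribute
{
    public SampleAttribute(string name, int version) { Name = name; Version = version; }

    public string Name { get; }
    public int Version { get; }
    public bool Experimental { get; set; }   // settable, so surfaced as a named parameter
}

// While typing the argument list below, '(' and each ',' satisfy IsTriggerCharacter,
// and GetCurrentArgumentState tracks which parameter the caret is on.
[Sample("demo", 2, Experimental = true)]
public class AnnotatedType { }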
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Collections.Generic; using System.Linq; using Hyak.Common; using Microsoft.Azure.Search.Models; namespace Microsoft.Azure.Search.Models { /// <summary> /// Parameters for filtering, sorting, faceting, paging, and other search /// query behaviors. /// </summary> public partial class SearchParameters { private string _filter; /// <summary> /// Optional. Gets or sets the OData $filter expression to apply to the /// search query. (see /// https://msdn.microsoft.com/library/azure/dn798921.aspx for more /// information) /// </summary> public string Filter { get { return this._filter; } set { this._filter = value; } } private IList<string> _highlightFields; /// <summary> /// Optional. Gets or sets the list of field names to use for hit /// highlights. Only searchable fields can be used for hit /// highlighting. /// </summary> public IList<string> HighlightFields { get { return this._highlightFields; } set { this._highlightFields = value; } } private string _highlightPostTag; /// <summary> /// Optional. Gets or sets a string tag that is appended to hit /// highlights. Must be set with HighlightPreTag. Default is /// &lt;/em&gt;. /// </summary> public string HighlightPostTag { get { return this._highlightPostTag; } set { this._highlightPostTag = value; } } private string _highlightPreTag; /// <summary> /// Optional. Gets or sets a string tag that is prepended to hit /// highlights. Must be set with HighlightPostTag. Default is /// &lt;em&gt;. /// </summary> public string HighlightPreTag { get { return this._highlightPreTag; } set { this._highlightPreTag = value; } } private bool _includeTotalResultCount; /// <summary> /// Optional. Gets or sets a value that specifies whether to fetch the /// total count of results. Default is false. Setting this value to /// true may have a performance impact. Note that the count returned /// is an approximation. /// </summary> public bool IncludeTotalResultCount { get { return this._includeTotalResultCount; } set { this._includeTotalResultCount = value; } } private IList<string> _orderBy; /// <summary> /// Optional. Gets or sets the list of OData $orderby expressions by /// which to sort the results. Each expression can be either a field /// name or a call to the geo.distance() function. Each expression can /// be followed by asc to indicate ascending, and desc to indicate /// descending. The default is ascending order. Ties will be broken by /// the match scores of documents. If no OrderBy is specified, the /// default sort order is descending by document match score. There /// can be at most 32 Orderby clauses. 
/// </summary> public IList<string> OrderBy { get { return this._orderBy; } set { this._orderBy = value; } } private IList<string> _scoringParameters; /// <summary> /// Optional. Gets or sets the list of parameter values to be used in /// scoring functions (for example, referencePointParameter) using the /// format name:value. For example, if the scoring profile defines a /// function with a parameter called 'mylocation' the parameter string /// would be "mylocation:-122.2,44.8"(without the quotes). /// </summary> public IList<string> ScoringParameters { get { return this._scoringParameters; } set { this._scoringParameters = value; } } private string _scoringProfile; /// <summary> /// Optional. Gets or sets the name of a scoring profile to evaluate /// match scores for matching documents in order to sort the results. /// </summary> public string ScoringProfile { get { return this._scoringProfile; } set { this._scoringProfile = value; } } private IList<string> _searchFields; /// <summary> /// Optional. Gets or sets the list of field names to include in the /// full-text search. /// </summary> public IList<string> SearchFields { get { return this._searchFields; } set { this._searchFields = value; } } private SearchMode _searchMode; /// <summary> /// Optional. Gets or sets a value that specifies whether any or all of /// the search terms must be matched in order to count the document as /// a match. /// </summary> public SearchMode SearchMode { get { return this._searchMode; } set { this._searchMode = value; } } private IList<string> _select; /// <summary> /// Optional. Gets or sets the list of fields to retrieve. If /// unspecified, all fields marked as retrievable in the schema are /// included. /// </summary> public IList<string> Select { get { return this._select; } set { this._select = value; } } private int? _skip; /// <summary> /// Optional. Gets or sets the number of search results to skip. This /// value cannot be greater than 100,000. If you need to scan /// documents in sequence, but cannot use Skip due to this limitation, /// consider using OrderBy on a totally-ordered key and Filter with a /// range query instead. /// </summary> public int? Skip { get { return this._skip; } set { this._skip = value; } } private int? _top; /// <summary> /// Optional. Gets or sets the number of search results to retrieve. /// This defaults to 50. If you specify a value greater than 1000 for /// Top and there are more than 1000 results, only the first 1000 /// results will be returned, along with a continuation token. See /// DocumentSearchResponse.ContinuationToken for more information. /// </summary> public int? Top { get { return this._top; } set { this._top = value; } } /// <summary> /// Initializes a new instance of the SearchParameters class. /// </summary> public SearchParameters() { this.HighlightFields = new LazyList<string>(); this.OrderBy = new LazyList<string>(); this.ScoringParameters = new LazyList<string>(); this.SearchFields = new LazyList<string>(); this.Select = new LazyList<string>(); } } }
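// Usage sketch (not part of the generated client code above; the field names such as "rating"
// and "description" are hypothetical). It shows how the documented properties compose: scalar
// options are set directly, while the list-valued options initialized by the constructor are
// populated via Add. Passing the object to an actual search call is client-specific and not
// shown here.
namespace SearchParametersIllustration
{
    using Microsoft.Azure.Search.Models;

    internal static class SearchParametersUsage
    {
        internal static SearchParameters Build()
        {
            var parameters = new SearchParameters
            {
                // OData $filter expression applied to the query.
                Filter = "rating ge 4",
                // Page through results ten at a time from the start.
                Top = 10,
                Skip = 0,
                // Request the (approximate) total result count.
                IncludeTotalResultCount = true
            };

            // Sort descending by rating; ties fall back to document match score.
            parameters.OrderBy.Add("rating desc");

            // Retrieve only the fields we need and highlight hits in the description.
            parameters.Select.Add("id");
            parameters.Select.Add("description");
            parameters.HighlightFields.Add("description");

            return parameters;
        }
    }
}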
// Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.Collections.Concurrent; using Hazelcast.Core; using Hazelcast.Net.Ext; namespace Hazelcast.IO.Serialization { internal sealed class PortableContext : IPortableContext { private readonly ConcurrentDictionary<int, ClassDefinitionContext> _classDefContextMap = new ConcurrentDictionary<int, ClassDefinitionContext>(); private readonly SerializationService _serializationService; private readonly int _version; internal PortableContext(SerializationService serializationService, int version) { _serializationService = serializationService; _version = version; } public int GetClassVersion(int factoryId, int classId) { return GetClassDefContext(factoryId).GetClassVersion(classId); } public void SetClassVersion(int factoryId, int classId, int version) { GetClassDefContext(factoryId).SetClassVersion(classId, version); } public IClassDefinition LookupClassDefinition(int factoryId, int classId, int version) { return GetClassDefContext(factoryId).Lookup(classId, version); } /// <exception cref="System.IO.IOException" /> public IClassDefinition LookupClassDefinition(IData data) { if (!data.IsPortable()) { throw new ArgumentException("Data is not Portable!"); } var @in = _serializationService.CreateObjectDataInput(data); var factoryId = @in.ReadInt(); var classId = @in.ReadInt(); var version = @in.ReadInt(); var classDefinition = LookupClassDefinition(factoryId, classId, version); if (classDefinition == null) { classDefinition = ReadClassDefinition(@in, factoryId, classId, version); } return classDefinition; } public IClassDefinition RegisterClassDefinition(IClassDefinition cd) { return GetClassDefContext(cd.GetFactoryId()).Register(cd); } /// <exception cref="System.IO.IOException" /> public IClassDefinition LookupOrRegisterClassDefinition(IPortable p) { var portableVersion = PortableVersionHelper.GetVersion(p, _version); var cd = LookupClassDefinition(p.GetFactoryId(), p.GetClassId(), portableVersion); if (cd == null) { var writer = new ClassDefinitionWriter(this, p.GetFactoryId(), p.GetClassId(), portableVersion); p.WritePortable(writer); cd = writer.RegisterAndGet(); } return cd; } public IFieldDefinition GetFieldDefinition(IClassDefinition classDef, string name) { var fd = classDef.GetField(name); if (fd == null) { var fieldNames = name.Split('.'); if (fieldNames.Length > 1) { var currentClassDef = classDef; for (var i = 0; i < fieldNames.Length; i++) { name = fieldNames[i]; fd = currentClassDef.GetField(name); if (i == fieldNames.Length - 1) { break; } if (fd == null) { throw new ArgumentException("Unknown field: " + name); } currentClassDef = LookupClassDefinition(fd.GetFactoryId(), fd.GetClassId(), fd.GetVersion()); if (currentClassDef == null) { throw new ArgumentException("Not a registered Portable field: " + fd); } } } } return fd; } public int GetVersion() { return _version; } public IManagedContext GetManagedContext() { return 
_serializationService.GetManagedContext(); } public ByteOrder GetByteOrder() { return _serializationService.GetByteOrder(); } /// <exception cref="System.IO.IOException" /> internal IClassDefinition ReadClassDefinition(IBufferObjectDataInput @in, int factoryId, int classId, int version) { var register = true; var builder = new ClassDefinitionBuilder(factoryId, classId, version); // final position after portable is read @in.ReadInt(); // field count var fieldCount = @in.ReadInt(); var offset = @in.Position(); for (var i = 0; i < fieldCount; i++) { var pos = @in.ReadInt(offset + i*Bits.IntSizeInBytes); @in.Position(pos); var len = @in.ReadShort(); var chars = new char[len]; for (var k = 0; k < len; k++) { chars[k] = (char) @in.ReadUnsignedByte(); } var type = (FieldType) (@in.ReadByte()); var name = new string(chars); var fieldFactoryId = 0; var fieldClassId = 0; int fieldVersion = version; if (type == FieldType.Portable) { // is null if (@in.ReadBoolean()) { register = false; } fieldFactoryId = @in.ReadInt(); fieldClassId = @in.ReadInt(); if (register) { fieldVersion = @in.ReadInt(); ReadClassDefinition(@in, fieldFactoryId, fieldClassId, fieldVersion); } } else { if (type == FieldType.PortableArray) { var k1 = @in.ReadInt(); fieldFactoryId = @in.ReadInt(); fieldClassId = @in.ReadInt(); if (k1 > 0) { var p = @in.ReadInt(); @in.Position(p); fieldVersion = @in.ReadInt(); ReadClassDefinition(@in, fieldFactoryId, fieldClassId, fieldVersion); } else { register = false; } } } builder.AddField(new FieldDefinition(i, name, type, fieldFactoryId, fieldClassId, fieldVersion)); } var classDefinition = builder.Build(); if (register) { classDefinition = RegisterClassDefinition(classDefinition); } return classDefinition; } private ClassDefinitionContext GetClassDefContext(int factoryId) { return _classDefContextMap.GetOrAdd(factoryId, theFactoryId => new ClassDefinitionContext(this, theFactoryId)); } private sealed class ClassDefinitionContext { private readonly ConcurrentDictionary<int, int> _currentClassVersions = new ConcurrentDictionary<int, int>(); private readonly int _factoryId; private readonly PortableContext _portableContext; private readonly ConcurrentDictionary<long, IClassDefinition> _versionedDefinitions = new ConcurrentDictionary<long, IClassDefinition>(); internal ClassDefinitionContext(PortableContext portableContext, int factoryId) { _portableContext = portableContext; _factoryId = factoryId; } internal int GetClassVersion(int classId) { int version; var hasValue = _currentClassVersions.TryGetValue(classId, out version); return hasValue ? version : -1; } internal IClassDefinition Lookup(int classId, int version) { var versionedClassId = Bits.CombineToLong(classId, version); IClassDefinition cd; _versionedDefinitions.TryGetValue(versionedClassId, out cd); return cd; } internal IClassDefinition Register(IClassDefinition cd) { if (cd == null) { return null; } if (cd.GetFactoryId() != _factoryId) { throw new HazelcastSerializationException("Invalid factory-id! 
" + _factoryId + " -> " + cd); } if (cd is ClassDefinition) { var cdImpl = (ClassDefinition) cd; cdImpl.SetVersionIfNotSet(_portableContext.GetVersion()); } var versionedClassId = Bits.CombineToLong(cd.GetClassId(), cd.GetVersion()); var currentCd = _versionedDefinitions.GetOrAdd(versionedClassId, cd); if (Equals(currentCd, cd)) { return cd; } if (currentCd is ClassDefinition) { if (!currentCd.Equals(cd)) { throw new HazelcastSerializationException( "Incompatible class-definitions with same class-id: " + cd + " VS " + currentCd); } return currentCd; } _versionedDefinitions.AddOrUpdate(versionedClassId, cd, (key, oldValue) => cd); return cd; } internal void SetClassVersion(int classId, int version) { var hasAdded = _currentClassVersions.TryAdd(classId, version); if (!hasAdded && _currentClassVersions[classId] != version) { throw new ArgumentException("Class-id: " + classId + " is already registered!"); } } } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. namespace System.Text { using System; using System.Runtime.Serialization; using System.Security.Permissions; using System.Diagnostics.Contracts; // ASCIIEncoding // // Note that ASCIIEncoding is optimized with no best fit and ? for fallback. // It doesn't come in other flavors. // // Note: ASCIIEncoding is the only encoding that doesn't do best fit (windows has best fit). // // Note: IsAlwaysNormalized remains false because 1/2 the code points are unassigned, so they'd // use fallbacks, and we cannot guarantee that fallbacks are normalized. // [Serializable] [System.Runtime.InteropServices.ComVisible(true)] public class ASCIIEncoding : Encoding { // Used by Encoding.ASCII for lazy initialization // The initialization code will not be run until a static member of the class is referenced internal static readonly ASCIIEncoding s_default = new ASCIIEncoding(); public ASCIIEncoding() : base(Encoding.CodePageASCII) { } internal override void SetDefaultFallbacks() { // For ASCIIEncoding we just use default replacement fallback this.encoderFallback = EncoderFallback.ReplacementFallback; this.decoderFallback = DecoderFallback.ReplacementFallback; } // WARNING: GetByteCount(string chars), GetBytes(string chars,...), and GetString(byte[] byteIndex...) // WARNING: have different variable names than EncodingNLS.cs, so this can't just be cut & pasted, // WARNING: or it'll break VB's way of calling these. // NOTE: Many methods in this class forward to EncodingForwarder for // validating arguments/wrapping the unsafe methods in this class // which do the actual work. That class contains // shared logic for doing this which is used by // ASCIIEncoding, EncodingNLS, UnicodeEncoding, UTF32Encoding, // UTF7Encoding, and UTF8Encoding. // The reason the code is separated out into a static class, rather // than a base class which overrides all of these methods for us // (which is what EncodingNLS is for internal Encodings) is because // that's really more of an implementation detail so it's internal. // At the same time, C# doesn't allow a public class subclassing an // internal/private one, so we end up having to re-override these // methods in all of the public Encodings + EncodingNLS. // Returns the number of bytes required to encode a range of characters in // a character array. public override int GetByteCount(char[] chars, int index, int count) { return EncodingForwarder.GetByteCount(this, chars, index, count); } public override int GetByteCount(String chars) { return EncodingForwarder.GetByteCount(this, chars); } [System.Security.SecurityCritical] // auto-generated [CLSCompliant(false)] [System.Runtime.InteropServices.ComVisible(false)] public override unsafe int GetByteCount(char* chars, int count) { return EncodingForwarder.GetByteCount(this, chars, count); } public override int GetBytes(String chars, int charIndex, int charCount, byte[] bytes, int byteIndex) { return EncodingForwarder.GetBytes(this, chars, charIndex, charCount, bytes, byteIndex); } // Encodes a range of characters in a character array into a range of bytes // in a byte array. An exception occurs if the byte array is not large // enough to hold the complete encoding of the characters. The // GetByteCount method can be used to determine the exact number of // bytes that will be produced for a given range of characters.
// Alternatively, the GetMaxByteCount method can be used to // determine the maximum number of bytes that will be produced for a given // number of characters, regardless of the actual character values. public override int GetBytes(char[] chars, int charIndex, int charCount, byte[] bytes, int byteIndex) { return EncodingForwarder.GetBytes(this, chars, charIndex, charCount, bytes, byteIndex); } [System.Security.SecurityCritical] // auto-generated [CLSCompliant(false)] [System.Runtime.InteropServices.ComVisible(false)] public override unsafe int GetBytes(char* chars, int charCount, byte* bytes, int byteCount) { return EncodingForwarder.GetBytes(this, chars, charCount, bytes, byteCount); } // Returns the number of characters produced by decoding a range of bytes // in a byte array. public override int GetCharCount(byte[] bytes, int index, int count) { return EncodingForwarder.GetCharCount(this, bytes, index, count); } [System.Security.SecurityCritical] // auto-generated [CLSCompliant(false)] [System.Runtime.InteropServices.ComVisible(false)] public override unsafe int GetCharCount(byte* bytes, int count) { return EncodingForwarder.GetCharCount(this, bytes, count); } public override int GetChars(byte[] bytes, int byteIndex, int byteCount, char[] chars, int charIndex) { return EncodingForwarder.GetChars(this, bytes, byteIndex, byteCount, chars, charIndex); } [System.Security.SecurityCritical] // auto-generated [CLSCompliant(false)] [System.Runtime.InteropServices.ComVisible(false)] public unsafe override int GetChars(byte* bytes, int byteCount, char* chars, int charCount) { return EncodingForwarder.GetChars(this, bytes, byteCount, chars, charCount); } // Returns a string containing the decoded representation of a range of // bytes in a byte array. public override String GetString(byte[] bytes, int byteIndex, int byteCount) { return EncodingForwarder.GetString(this, bytes, byteIndex, byteCount); } // End of overridden methods which use EncodingForwarder // GetByteCount // Note: We start by assuming that the output will be the same as count. Having // an encoder or fallback may change that assumption [System.Security.SecurityCritical] // auto-generated internal override unsafe int GetByteCount(char* chars, int charCount, EncoderNLS encoder) { // Just need to ASSERT, this is called by something else internal that checked parameters already Contract.Assert(charCount >= 0, "[ASCIIEncoding.GetByteCount]count is negative"); Contract.Assert(chars != null, "[ASCIIEncoding.GetByteCount]chars is null"); // Assert because we shouldn't be able to have a null encoder. Contract.Assert(encoderFallback != null, "[ASCIIEncoding.GetByteCount]Attempting to use null fallback encoder"); char charLeftOver = (char)0; EncoderReplacementFallback fallback = null; // Start by assuming default count, then +/- for fallback characters char* charEnd = chars + charCount; // For fallback we may need a fallback buffer, we know we aren't default fallback. 
EncoderFallbackBuffer fallbackBuffer = null; if (encoder != null) { charLeftOver = encoder.charLeftOver; Contract.Assert(charLeftOver == 0 || Char.IsHighSurrogate(charLeftOver), "[ASCIIEncoding.GetByteCount]leftover character should be high surrogate"); fallback = encoder.Fallback as EncoderReplacementFallback; // We mustn't have left over fallback data when counting if (encoder.InternalHasFallbackBuffer) { // We always need the fallback buffer in get bytes so we can flush any remaining ones if necessary fallbackBuffer = encoder.FallbackBuffer; if (fallbackBuffer.Remaining > 0 && encoder.m_throwOnOverflow) throw new ArgumentException(Environment.GetResourceString("Argument_EncoderFallbackNotEmpty", this.EncodingName, encoder.Fallback.GetType())); // Set our internal fallback interesting things. fallbackBuffer.InternalInitialize(chars, charEnd, encoder, false); } // Verify that we have no fallbackbuffer, for ASCII its always empty, so just assert Contract.Assert(!encoder.m_throwOnOverflow || !encoder.InternalHasFallbackBuffer || encoder.FallbackBuffer.Remaining == 0, "[ASCIICodePageEncoding.GetByteCount]Expected empty fallback buffer"); // if (encoder.InternalHasFallbackBuffer && encoder.FallbackBuffer.Remaining > 0) // throw new ArgumentException(Environment.GetResourceString("Argument_EncoderFallbackNotEmpty", // this.EncodingName, encoder.Fallback.GetType())); } else { fallback = this.EncoderFallback as EncoderReplacementFallback; } // If we have an encoder AND we aren't using default fallback, // then we may have a complicated count. if (fallback != null && fallback.MaxCharCount == 1) { // Replacement fallback encodes surrogate pairs as two ?? (or two whatever), so return size is always // same as input size. // Note that no existing SBCS code pages map code points to supplimentary characters, so this is easy. // We could however have 1 extra byte if the last call had an encoder and a funky fallback and // if we don't use the funky fallback this time. // Do we have an extra char left over from last time? if (charLeftOver > 0) charCount++; return (charCount); } // Count is more complicated if you have a funky fallback // For fallback we may need a fallback buffer, we know we're not default fallback int byteCount = 0; // We may have a left over character from last time, try and process it. if (charLeftOver > 0) { Contract.Assert(Char.IsHighSurrogate(charLeftOver), "[ASCIIEncoding.GetByteCount]leftover character should be high surrogate"); Contract.Assert(encoder != null, "[ASCIIEncoding.GetByteCount]Expected encoder"); // Since left over char was a surrogate, it'll have to be fallen back. // Get Fallback fallbackBuffer = encoder.FallbackBuffer; fallbackBuffer.InternalInitialize(chars, charEnd, encoder, false); // This will fallback a pair if *chars is a low surrogate fallbackBuffer.InternalFallback(charLeftOver, ref chars); } // Now we may have fallback char[] already from the encoder // Go ahead and do it, including the fallback. char ch; while ((ch = (fallbackBuffer == null) ? '\0' : fallbackBuffer.InternalGetNextChar()) != 0 || chars < charEnd) { // First unwind any fallback if (ch == 0) { // No fallback, just get next char ch = *chars; chars++; } // Check for fallback, this'll catch surrogate pairs too. // no chars >= 0x80 are allowed. 
if (ch > 0x7f) { if (fallbackBuffer == null) { // Initialize the buffer if (encoder == null) fallbackBuffer = this.encoderFallback.CreateFallbackBuffer(); else fallbackBuffer = encoder.FallbackBuffer; fallbackBuffer.InternalInitialize(charEnd - charCount, charEnd, encoder, false); } // Get Fallback fallbackBuffer.InternalFallback(ch, ref chars); continue; } // We'll use this one byteCount++; } Contract.Assert(fallbackBuffer == null || fallbackBuffer.Remaining == 0, "[ASCIIEncoding.GetByteCount]Expected Empty fallback buffer"); return byteCount; } [System.Security.SecurityCritical] // auto-generated internal override unsafe int GetBytes(char* chars, int charCount, byte* bytes, int byteCount, EncoderNLS encoder) { // Just need to ASSERT, this is called by something else internal that checked parameters already Contract.Assert(bytes != null, "[ASCIIEncoding.GetBytes]bytes is null"); Contract.Assert(byteCount >= 0, "[ASCIIEncoding.GetBytes]byteCount is negative"); Contract.Assert(chars != null, "[ASCIIEncoding.GetBytes]chars is null"); Contract.Assert(charCount >= 0, "[ASCIIEncoding.GetBytes]charCount is negative"); // Assert because we shouldn't be able to have a null encoder. Contract.Assert(encoderFallback != null, "[ASCIIEncoding.GetBytes]Attempting to use null encoder fallback"); // Get any left over characters char charLeftOver = (char)0; EncoderReplacementFallback fallback = null; // For fallback we may need a fallback buffer, we know we aren't default fallback. EncoderFallbackBuffer fallbackBuffer = null; // prepare our end char* charEnd = chars + charCount; byte* byteStart = bytes; char* charStart = chars; if (encoder != null) { charLeftOver = encoder.charLeftOver; fallback = encoder.Fallback as EncoderReplacementFallback; // We mustn't have left over fallback data when counting if (encoder.InternalHasFallbackBuffer) { // We always need the fallback buffer in get bytes so we can flush any remaining ones if necessary fallbackBuffer = encoder.FallbackBuffer; if (fallbackBuffer.Remaining > 0 && encoder.m_throwOnOverflow) throw new ArgumentException(Environment.GetResourceString("Argument_EncoderFallbackNotEmpty", this.EncodingName, encoder.Fallback.GetType())); // Set our internal fallback interesting things. fallbackBuffer.InternalInitialize(charStart, charEnd, encoder, true); } Contract.Assert(charLeftOver == 0 || Char.IsHighSurrogate(charLeftOver), "[ASCIIEncoding.GetBytes]leftover character should be high surrogate"); // Verify that we have no fallbackbuffer, for ASCII its always empty, so just assert Contract.Assert(!encoder.m_throwOnOverflow || !encoder.InternalHasFallbackBuffer || encoder.FallbackBuffer.Remaining == 0, "[ASCIICodePageEncoding.GetBytes]Expected empty fallback buffer"); // if (encoder.m_throwOnOverflow && encoder.InternalHasFallbackBuffer && // encoder.FallbackBuffer.Remaining > 0) // throw new ArgumentException(Environment.GetResourceString("Argument_EncoderFallbackNotEmpty", // this.EncodingName, encoder.Fallback.GetType())); } else { fallback = this.EncoderFallback as EncoderReplacementFallback; } // See if we do the fast default or slightly slower fallback if (fallback != null && fallback.MaxCharCount == 1) { // Fast version char cReplacement = fallback.DefaultString[0]; // Check for replacements in range, otherwise fall back to slow version. if (cReplacement <= (char)0x7f) { // We should have exactly as many output bytes as input bytes, unless there's a left // over character, in which case we may need one more. 
// If we had a left over character will have to add a ? (This happens if they had a funky // fallback last time, but not this time.) (We can't spit any out though // because with fallback encoder each surrogate is treated as a seperate code point) if (charLeftOver > 0) { // Have to have room // Throw even if doing no throw version because this is just 1 char, // so buffer will never be big enough if (byteCount == 0) ThrowBytesOverflow(encoder, true); // This'll make sure we still have more room and also make sure our return value is correct. *(bytes++) = (byte)cReplacement; byteCount--; // We used one of the ones we were counting. } // This keeps us from overrunning our output buffer if (byteCount < charCount) { // Throw or make buffer smaller? ThrowBytesOverflow(encoder, byteCount < 1); // Just use what we can charEnd = chars + byteCount; } // We just do a quick copy while (chars < charEnd) { char ch2 = *(chars++); if (ch2 >= 0x0080) *(bytes++) = (byte)cReplacement; else *(bytes++) = unchecked((byte)(ch2)); } // Clear encoder if (encoder != null) { encoder.charLeftOver = (char)0; encoder.m_charsUsed = (int)(chars-charStart); } return (int)(bytes - byteStart); } } // Slower version, have to do real fallback. // prepare our end byte* byteEnd = bytes + byteCount; // We may have a left over character from last time, try and process it. if (charLeftOver > 0) { // Initialize the buffer Contract.Assert(encoder != null, "[ASCIIEncoding.GetBytes]Expected non null encoder if we have surrogate left over"); fallbackBuffer = encoder.FallbackBuffer; fallbackBuffer.InternalInitialize(chars, charEnd, encoder, true); // Since left over char was a surrogate, it'll have to be fallen back. // Get Fallback // This will fallback a pair if *chars is a low surrogate fallbackBuffer.InternalFallback(charLeftOver, ref chars); } // Now we may have fallback char[] already from the encoder // Go ahead and do it, including the fallback. char ch; while ((ch = (fallbackBuffer == null) ? '\0' : fallbackBuffer.InternalGetNextChar()) != 0 || chars < charEnd) { // First unwind any fallback if (ch == 0) { // No fallback, just get next char ch = *chars; chars++; } // Check for fallback, this'll catch surrogate pairs too. // All characters >= 0x80 must fall back. if (ch > 0x7f) { // Initialize the buffer if (fallbackBuffer == null) { if (encoder == null) fallbackBuffer = this.encoderFallback.CreateFallbackBuffer(); else fallbackBuffer = encoder.FallbackBuffer; fallbackBuffer.InternalInitialize(charEnd - charCount, charEnd, encoder, true); } // Get Fallback fallbackBuffer.InternalFallback(ch, ref chars); // Go ahead & continue (& do the fallback) continue; } // We'll use this one // Bounds check if (bytes >= byteEnd) { // didn't use this char, we'll throw or use buffer if (fallbackBuffer == null || fallbackBuffer.bFallingBack == false) { Contract.Assert(chars > charStart || bytes == byteStart, "[ASCIIEncoding.GetBytes]Expected chars to have advanced already."); chars--; // don't use last char } else fallbackBuffer.MovePrevious(); // Are we throwing or using buffer? ThrowBytesOverflow(encoder, bytes == byteStart); // throw? 
break; // don't throw, stop } // Go ahead and add it *bytes = unchecked((byte)ch); bytes++; } // Need to do encoder stuff if (encoder != null) { // Fallback stuck it in encoder if necessary, but we have to clear MustFlush cases if (fallbackBuffer != null && !fallbackBuffer.bUsedEncoder) // Clear it in case of MustFlush encoder.charLeftOver = (char)0; // Set our chars used count encoder.m_charsUsed = (int)(chars - charStart); } Contract.Assert(fallbackBuffer == null || fallbackBuffer.Remaining == 0 || (encoder != null && !encoder.m_throwOnOverflow ), "[ASCIIEncoding.GetBytes]Expected Empty fallback buffer at end"); return (int)(bytes - byteStart); } // This is internal and called by something else, [System.Security.SecurityCritical] // auto-generated internal override unsafe int GetCharCount(byte* bytes, int count, DecoderNLS decoder) { // Just assert, we're called internally so these should be safe, checked already Contract.Assert(bytes != null, "[ASCIIEncoding.GetCharCount]bytes is null"); Contract.Assert(count >= 0, "[ASCIIEncoding.GetCharCount]byteCount is negative"); // ASCII doesn't do best fit, so don't have to check for it, find out which decoder fallback we're using DecoderReplacementFallback fallback = null; if (decoder == null) fallback = this.DecoderFallback as DecoderReplacementFallback; else { fallback = decoder.Fallback as DecoderReplacementFallback; Contract.Assert(!decoder.m_throwOnOverflow || !decoder.InternalHasFallbackBuffer || decoder.FallbackBuffer.Remaining == 0, "[ASCIICodePageEncoding.GetCharCount]Expected empty fallback buffer"); } if (fallback != null && fallback.MaxCharCount == 1) { // Just return length, SBCS stay the same length because they don't map to surrogate // pairs and we don't have a decoder fallback. return count; } // Only need decoder fallback buffer if not using default replacement fallback, no best fit for ASCII DecoderFallbackBuffer fallbackBuffer = null; // Have to do it the hard way. // Assume charCount will be == count int charCount = count; byte[] byteBuffer = new byte[1]; // Do it our fast way byte* byteEnd = bytes + count; // Quick loop while (bytes < byteEnd) { // Faster if don't use *bytes++; byte b = *bytes; bytes++; // If unknown we have to do fallback count if (b >= 0x80) { if (fallbackBuffer == null) { if (decoder == null) fallbackBuffer = this.DecoderFallback.CreateFallbackBuffer(); else fallbackBuffer = decoder.FallbackBuffer; fallbackBuffer.InternalInitialize(byteEnd - count, null); } // Use fallback buffer byteBuffer[0] = b; charCount--; // Have to unreserve the one we already allocated for b charCount += fallbackBuffer.InternalFallback(byteBuffer, bytes); } } // Fallback buffer must be empty Contract.Assert(fallbackBuffer == null || fallbackBuffer.Remaining == 0, "[ASCIIEncoding.GetCharCount]Expected Empty fallback buffer"); // Converted sequence is same length as input return charCount; } [System.Security.SecurityCritical] // auto-generated internal override unsafe int GetChars(byte* bytes, int byteCount, char* chars, int charCount, DecoderNLS decoder) { // Just need to ASSERT, this is called by something else internal that checked parameters already Contract.Assert(bytes != null, "[ASCIIEncoding.GetChars]bytes is null"); Contract.Assert(byteCount >= 0, "[ASCIIEncoding.GetChars]byteCount is negative"); Contract.Assert(chars != null, "[ASCIIEncoding.GetChars]chars is null"); Contract.Assert(charCount >= 0, "[ASCIIEncoding.GetChars]charCount is negative"); // Do it fast way if using ? 
replacement fallback byte* byteEnd = bytes + byteCount; byte* byteStart = bytes; char* charStart = chars; // Note: ASCII doesn't do best fit, but we have to fallback if they use something > 0x7f // Only need decoder fallback buffer if not using ? fallback. // ASCII doesn't do best fit, so don't have to check for it, find out which decoder fallback we're using DecoderReplacementFallback fallback = null; if (decoder == null) fallback = this.DecoderFallback as DecoderReplacementFallback; else { fallback = decoder.Fallback as DecoderReplacementFallback; Contract.Assert(!decoder.m_throwOnOverflow || !decoder.InternalHasFallbackBuffer || decoder.FallbackBuffer.Remaining == 0, "[ASCIICodePageEncoding.GetChars]Expected empty fallback buffer"); } if (fallback != null && fallback.MaxCharCount == 1) { // Try it the fast way char replacementChar = fallback.DefaultString[0]; // Need byteCount chars, otherwise too small buffer if (charCount < byteCount) { // Need at least 1 output byte, throw if must throw ThrowCharsOverflow(decoder, charCount < 1); // Not throwing, use what we can byteEnd = bytes + charCount; } // Quick loop, just do '?' replacement because we don't have fallbacks for decodings. while (bytes < byteEnd) { byte b = *(bytes++); if (b >= 0x80) // This is an invalid byte in the ASCII encoding. *(chars++) = replacementChar; else *(chars++) = unchecked((char)b); } // bytes & chars used are the same if (decoder != null) decoder.m_bytesUsed = (int)(bytes - byteStart); return (int)(chars - charStart); } // Slower way's going to need a fallback buffer DecoderFallbackBuffer fallbackBuffer = null; byte[] byteBuffer = new byte[1]; char* charEnd = chars + charCount; // Not quite so fast loop while (bytes < byteEnd) { // Faster if don't use *bytes++; byte b = *(bytes); bytes++; if (b >= 0x80) { // This is an invalid byte in the ASCII encoding. if (fallbackBuffer == null) { if (decoder == null) fallbackBuffer = this.DecoderFallback.CreateFallbackBuffer(); else fallbackBuffer = decoder.FallbackBuffer; fallbackBuffer.InternalInitialize(byteEnd - byteCount, charEnd); } // Use fallback buffer byteBuffer[0] = b; // Note that chars won't get updated unless this succeeds if (!fallbackBuffer.InternalFallback(byteBuffer, bytes, ref chars)) { // May or may not throw, but we didn't get this byte Contract.Assert(bytes > byteStart || chars == charStart, "[ASCIIEncoding.GetChars]Expected bytes to have advanced already (fallback case)"); bytes--; // unused byte fallbackBuffer.InternalReset(); // Didn't fall this back ThrowCharsOverflow(decoder, chars == charStart); // throw? break; // don't throw, but stop loop } } else { // Make sure we have buffer space if (chars >= charEnd) { Contract.Assert(bytes > byteStart || chars == charStart, "[ASCIIEncoding.GetChars]Expected bytes to have advanced already (normal case)"); bytes--; // unused byte ThrowCharsOverflow(decoder, chars == charStart); // throw? break; // don't throw, but stop loop } *(chars) = unchecked((char)b); chars++; } } // Might have had decoder fallback stuff. 
if (decoder != null) decoder.m_bytesUsed = (int)(bytes - byteStart); // Expect Empty fallback buffer for GetChars Contract.Assert(fallbackBuffer == null || fallbackBuffer.Remaining == 0, "[ASCIIEncoding.GetChars]Expected Empty fallback buffer"); return (int)(chars - charStart); } public override int GetMaxByteCount(int charCount) { if (charCount < 0) throw new ArgumentOutOfRangeException(nameof(charCount), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); Contract.EndContractBlock(); // Characters would be # of characters + 1 in case high surrogate is ? * max fallback long byteCount = (long)charCount + 1; if (EncoderFallback.MaxCharCount > 1) byteCount *= EncoderFallback.MaxCharCount; // 1 to 1 for most characters. Only surrogates with fallbacks have less. if (byteCount > 0x7fffffff) throw new ArgumentOutOfRangeException(nameof(charCount), Environment.GetResourceString("ArgumentOutOfRange_GetByteCountOverflow")); return (int)byteCount; } public override int GetMaxCharCount(int byteCount) { if (byteCount < 0) throw new ArgumentOutOfRangeException(nameof(byteCount), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); Contract.EndContractBlock(); // Just return length, SBCS stay the same length because they don't map to surrogate long charCount = (long)byteCount; // 1 to 1 for most characters. Only surrogates with fallbacks have less, unknown fallbacks could be longer. if (DecoderFallback.MaxCharCount > 1) charCount *= DecoderFallback.MaxCharCount; if (charCount > 0x7fffffff) throw new ArgumentOutOfRangeException(nameof(byteCount), Environment.GetResourceString("ArgumentOutOfRange_GetCharCountOverflow")); return (int)charCount; } // True if and only if the encoding only uses single byte code points. (Ie, ASCII, 1252, etc) [System.Runtime.InteropServices.ComVisible(false)] public override bool IsSingleByte { get { return true; } } [System.Runtime.InteropServices.ComVisible(false)] public override Decoder GetDecoder() { return new DecoderNLS(this); } [System.Runtime.InteropServices.ComVisible(false)] public override Encoder GetEncoder() { return new EncoderNLS(this); } } }
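// Usage sketch (not part of the reference source above). With the default replacement fallback,
// ASCIIEncoding encodes any character above 0x7F as '?' and decodes any byte at or above 0x80 as
// '?', which corresponds to the fast path taken in GetBytes/GetChars when MaxCharCount == 1.
namespace AsciiEncodingIllustration
{
    using System;
    using System.Text;

    internal static class AsciiFallbackDemo
    {
        internal static void Run()
        {
            Encoding ascii = Encoding.ASCII;

            // 'é' (U+00E9) is outside the ASCII range, so it encodes to '?' (0x3F).
            byte[] bytes = ascii.GetBytes("Héllo");
            Console.WriteLine(string.Join(",", bytes)); // 72,63,108,108,111

            // A byte >= 0x80 is invalid ASCII and decodes to the replacement character '?'.
            string text = ascii.GetString(new byte[] { 0x48, 0x80, 0x69 });
            Console.WriteLine(text); // H?i
        }
    }
}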
using System.Runtime.Remoting; using NUnit.Framework; using System; using System.Collections.Generic; using System.Linq; using Umbraco.Core; using Umbraco.Core.Models; using Umbraco.Core.Models.Rdbms; using Umbraco.Tests.CodeFirst.TestModels.Composition; using Umbraco.Tests.TestHelpers; using Umbraco.Tests.TestHelpers.Entities; namespace Umbraco.Tests.Services { [DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)] [TestFixture, RequiresSTA] public class ContentTypeServiceTests : BaseServiceTest { [SetUp] public override void Initialize() { base.Initialize(); } [TearDown] public override void TearDown() { base.TearDown(); } [Test] public void Deleting_PropertyType_Removes_The_Property_From_Content() { IContentType contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1"); ServiceContext.ContentTypeService.Save(contentType1); IContent contentItem = MockedContent.CreateTextpageContent(contentType1, "Testing", -1); ServiceContext.ContentService.SaveAndPublishWithStatus(contentItem); var initProps = contentItem.Properties.Count; var initPropTypes = contentItem.PropertyTypes.Count(); //remove a property contentType1.RemovePropertyType(contentType1.PropertyTypes.First().Alias); ServiceContext.ContentTypeService.Save(contentType1); //re-load it from the db contentItem = ServiceContext.ContentService.GetById(contentItem.Id); Assert.AreEqual(initPropTypes - 1, contentItem.PropertyTypes.Count()); Assert.AreEqual(initProps - 1, contentItem.Properties.Count); } [Test] public void Rebuild_Content_Xml_On_Alias_Change() { var contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1"); var contentType2 = MockedContentTypes.CreateTextpageContentType("test2", "Test2"); ServiceContext.ContentTypeService.Save(contentType1); ServiceContext.ContentTypeService.Save(contentType2); var contentItems1 = MockedContent.CreateTextpageContent(contentType1, -1, 10).ToArray(); contentItems1.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x)); var contentItems2 = MockedContent.CreateTextpageContent(contentType2, -1, 5).ToArray(); contentItems2.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x)); //only update the contentType1 alias which will force an xml rebuild for all content of that type contentType1.Alias = "newAlias"; ServiceContext.ContentTypeService.Save(contentType1); foreach (var c in contentItems1) { var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id }); Assert.IsNotNull(xml); Assert.IsTrue(xml.Xml.StartsWith("<newAlias")); } foreach (var c in contentItems2) { var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id }); Assert.IsNotNull(xml); Assert.IsTrue(xml.Xml.StartsWith("<test2")); //should remain the same } } [Test] public void Rebuild_Content_Xml_On_Property_Removal() { var contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1"); ServiceContext.ContentTypeService.Save(contentType1); var contentItems1 = MockedContent.CreateTextpageContent(contentType1, -1, 10).ToArray(); contentItems1.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x)); var alias = contentType1.PropertyTypes.First().Alias; var elementToMatch = "<" + alias + ">"; foreach (var c in contentItems1) { var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id }); Assert.IsNotNull(xml); Assert.IsTrue(xml.Xml.Contains(elementToMatch)); //verify that it is there before we 
remove the property } //remove a property contentType1.RemovePropertyType(contentType1.PropertyTypes.First().Alias); ServiceContext.ContentTypeService.Save(contentType1); var reQueried = ServiceContext.ContentTypeService.GetContentType(contentType1.Id); var reContent = ServiceContext.ContentService.GetById(contentItems1.First().Id); foreach (var c in contentItems1) { var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id }); Assert.IsNotNull(xml); Assert.IsFalse(xml.Xml.Contains(elementToMatch)); //verify that it is no longer there } } [Test] public void Get_Descendants() { // Arrange var contentTypeService = ServiceContext.ContentTypeService; var hierarchy = CreateContentTypeHierarchy(); contentTypeService.Save(hierarchy, 0); //ensure they are saved! var master = hierarchy.First(); //Act var descendants = master.Descendants(); //Assert Assert.AreEqual(10, descendants.Count()); } [Test] public void Get_Descendants_And_Self() { // Arrange var contentTypeService = ServiceContext.ContentTypeService; var hierarchy = CreateContentTypeHierarchy(); contentTypeService.Save(hierarchy, 0); //ensure they are saved! var master = hierarchy.First(); //Act var descendants = master.DescendantsAndSelf(); //Assert Assert.AreEqual(11, descendants.Count()); } [Test] public void Get_With_Missing_Guid() { // Arrange var contentTypeService = ServiceContext.ContentTypeService; //Act var result = contentTypeService.GetMediaType(Guid.NewGuid()); //Assert Assert.IsNull(result); } [Test] public void Can_Bulk_Save_New_Hierarchy_Content_Types() { // Arrange var contentTypeService = ServiceContext.ContentTypeService; var hierarchy = CreateContentTypeHierarchy(); // Act contentTypeService.Save(hierarchy, 0); Assert.That(hierarchy.Any(), Is.True); Assert.That(hierarchy.Any(x => x.HasIdentity == false), Is.False); //all parent id's should be ok, they are lazy and if they equal zero an exception will be thrown Assert.DoesNotThrow(() => hierarchy.Any(x => x.ParentId != 0)); for (var i = 0; i < hierarchy.Count(); i++) { if (i == 0) continue; Assert.AreEqual(hierarchy.ElementAt(i).ParentId, hierarchy.ElementAt(i - 1).Id); } } [Test] public void Can_Save_ContentType_Structure_And_Create_Content_Based_On_It() { // Arrange var cs = ServiceContext.ContentService; var cts = ServiceContext.ContentTypeService; var dtdYesNo = ServiceContext.DataTypeService.GetDataTypeDefinitionById(-49); var ctBase = new ContentType(-1) { Name = "Base", Alias = "Base", Icon = "folder.gif", Thumbnail = "folder.png" }; ctBase.AddPropertyType(new PropertyType(dtdYesNo, Constants.Conventions.Content.NaviHide) { Name = "Hide From Navigation", } /*,"Navigation"*/); cts.Save(ctBase); const string contentTypeAlias = "HomePage"; var ctHomePage = new ContentType(ctBase, contentTypeAlias) { Name = "Home Page", Alias = contentTypeAlias, Icon = "settingDomain.gif", Thumbnail = "folder.png", AllowedAsRoot = true }; ctHomePage.AddPropertyType(new PropertyType(dtdYesNo, "someProperty") { Name = "Some property" } /*,"Navigation"*/); cts.Save(ctHomePage); // Act var homeDoc = cs.CreateContent("Home Page", -1, contentTypeAlias); cs.SaveAndPublishWithStatus(homeDoc); // Assert Assert.That(ctBase.HasIdentity, Is.True); Assert.That(ctHomePage.HasIdentity, Is.True); Assert.That(homeDoc.HasIdentity, Is.True); Assert.That(homeDoc.ContentTypeId, Is.EqualTo(ctHomePage.Id)); } [Test] public void Create_Content_Type_Ensures_Sort_Orders() { var service = ServiceContext.ContentTypeService; var contentType = new ContentType(-1) { Alias 
= "test", Name = "Test", Description = "ContentType used for simple text pages", Icon = ".sprTreeDoc3", Thumbnail = "doc2.png", SortOrder = 1, CreatorId = 0, Trashed = false }; contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, DataTypeDefinitionId = -88 }); contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TinyMCEAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, DataTypeDefinitionId = -87 }); contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "Name of the author", Mandatory = false, DataTypeDefinitionId = -88 }); service.Save(contentType); var sortOrders = contentType.PropertyTypes.Select(x => x.SortOrder).ToArray(); Assert.AreEqual(1, sortOrders.Count(x => x == 0)); Assert.AreEqual(1, sortOrders.Count(x => x == 1)); Assert.AreEqual(1, sortOrders.Count(x => x == 2)); } [Test] public void Can_Create_And_Save_ContentType_Composition() { /* * Global * - Components * - Category */ var service = ServiceContext.ContentTypeService; var global = MockedContentTypes.CreateSimpleContentType("global", "Global"); service.Save(global); var components = MockedContentTypes.CreateSimpleContentType("components", "Components", global, true); service.Save(components); var component = MockedContentTypes.CreateSimpleContentType("component", "Component", components, true); service.Save(component); var category = MockedContentTypes.CreateSimpleContentType("category", "Category", global, true); service.Save(category); var success = category.AddContentType(component); Assert.That(success, Is.False); } [Test] public void Can_Remove_ContentType_Composition_From_ContentType() { //Test for U4-2234 var cts = ServiceContext.ContentTypeService; //Arrange var component = CreateComponent(); cts.Save(component); var banner = CreateBannerComponent(component); cts.Save(banner); var site = CreateSite(); cts.Save(site); var homepage = CreateHomepage(site); cts.Save(homepage); //Add banner to homepage var added = homepage.AddContentType(banner); cts.Save(homepage); //Assert composition var bannerExists = homepage.ContentTypeCompositionExists(banner.Alias); var bannerPropertyExists = homepage.CompositionPropertyTypes.Any(x => x.Alias.Equals("bannerName")); Assert.That(added, Is.True); Assert.That(bannerExists, Is.True); Assert.That(bannerPropertyExists, Is.True); Assert.That(homepage.CompositionPropertyTypes.Count(), Is.EqualTo(6)); //Remove banner from homepage var removed = homepage.RemoveContentType(banner.Alias); cts.Save(homepage); //Assert composition var bannerStillExists = homepage.ContentTypeCompositionExists(banner.Alias); var bannerPropertyStillExists = homepage.CompositionPropertyTypes.Any(x => x.Alias.Equals("bannerName")); Assert.That(removed, Is.True); Assert.That(bannerStillExists, Is.False); Assert.That(bannerPropertyStillExists, Is.False); Assert.That(homepage.CompositionPropertyTypes.Count(), Is.EqualTo(4)); } [Test] public void Can_Copy_ContentType_By_Performing_Clone() { // Arrange var service = ServiceContext.ContentTypeService; var metaContentType = MockedContentTypes.CreateMetaContentType(); service.Save(metaContentType); var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", metaContentType); service.Save(simpleContentType); var categoryId = simpleContentType.Id; // 
Act var sut = simpleContentType.DeepCloneWithResetIdentities("newcategory"); service.Save(sut); // Assert Assert.That(sut.HasIdentity, Is.True); var contentType = service.GetContentType(sut.Id); var category = service.GetContentType(categoryId); Assert.That(contentType.CompositionAliases().Any(x => x.Equals("meta")), Is.True); Assert.AreEqual(contentType.ParentId, category.ParentId); Assert.AreEqual(contentType.Level, category.Level); Assert.AreEqual(contentType.PropertyTypes.Count(), category.PropertyTypes.Count()); Assert.AreNotEqual(contentType.Id, category.Id); Assert.AreNotEqual(contentType.Key, category.Key); Assert.AreNotEqual(contentType.Path, category.Path); Assert.AreNotEqual(contentType.SortOrder, category.SortOrder); Assert.AreNotEqual(contentType.PropertyTypes.First(x => x.Alias.Equals("title")).Id, category.PropertyTypes.First(x => x.Alias.Equals("title")).Id); Assert.AreNotEqual(contentType.PropertyGroups.First(x => x.Name.Equals("Content")).Id, category.PropertyGroups.First(x => x.Name.Equals("Content")).Id); } [Test] public void Can_Copy_ContentType_To_New_Parent_By_Performing_Clone() { // Arrange var service = ServiceContext.ContentTypeService; var parentContentType1 = MockedContentTypes.CreateSimpleContentType("parent1", "Parent1"); service.Save(parentContentType1); var parentContentType2 = MockedContentTypes.CreateSimpleContentType("parent2", "Parent2", null, true); service.Save(parentContentType2); var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", parentContentType1, true); service.Save(simpleContentType); // Act var clone = simpleContentType.DeepCloneWithResetIdentities("newcategory"); clone.RemoveContentType("parent1"); clone.AddContentType(parentContentType2); clone.ParentId = parentContentType2.Id; service.Save(clone); // Assert Assert.That(clone.HasIdentity, Is.True); var clonedContentType = service.GetContentType(clone.Id); var originalContentType = service.GetContentType(simpleContentType.Id); Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent2")), Is.True); Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent1")), Is.False); Assert.AreEqual(clonedContentType.Path, "-1," + parentContentType2.Id + "," + clonedContentType.Id); Assert.AreEqual(clonedContentType.PropertyTypes.Count(), originalContentType.PropertyTypes.Count()); Assert.AreNotEqual(clonedContentType.ParentId, originalContentType.ParentId); Assert.AreEqual(clonedContentType.ParentId, parentContentType2.Id); Assert.AreNotEqual(clonedContentType.Id, originalContentType.Id); Assert.AreNotEqual(clonedContentType.Key, originalContentType.Key); Assert.AreNotEqual(clonedContentType.Path, originalContentType.Path); Assert.AreNotEqual(clonedContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id, originalContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id); Assert.AreNotEqual(clonedContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id, originalContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id); } [Test] public void Can_Copy_ContentType_With_Service_To_Root() { // Arrange var service = ServiceContext.ContentTypeService; var metaContentType = MockedContentTypes.CreateMetaContentType(); service.Save(metaContentType); var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", metaContentType); service.Save(simpleContentType); var categoryId = simpleContentType.Id; // Act var clone = service.Copy(simpleContentType, 
"newcategory", "new category"); // Assert Assert.That(clone.HasIdentity, Is.True); var cloned = service.GetContentType(clone.Id); var original = service.GetContentType(categoryId); Assert.That(cloned.CompositionAliases().Any(x => x.Equals("meta")), Is.False); //it's been copied to root Assert.AreEqual(cloned.ParentId, -1); Assert.AreEqual(cloned.Level, 1); Assert.AreEqual(cloned.PropertyTypes.Count(), original.PropertyTypes.Count()); Assert.AreEqual(cloned.PropertyGroups.Count(), original.PropertyGroups.Count()); for (int i = 0; i < cloned.PropertyGroups.Count; i++) { Assert.AreEqual(cloned.PropertyGroups[i].PropertyTypes.Count, original.PropertyGroups[i].PropertyTypes.Count); foreach (var propertyType in cloned.PropertyGroups[i].PropertyTypes) { Assert.IsTrue(propertyType.HasIdentity); } } foreach (var propertyType in cloned.PropertyTypes) { Assert.IsTrue(propertyType.HasIdentity); } Assert.AreNotEqual(cloned.Id, original.Id); Assert.AreNotEqual(cloned.Key, original.Key); Assert.AreNotEqual(cloned.Path, original.Path); Assert.AreNotEqual(cloned.SortOrder, original.SortOrder); Assert.AreNotEqual(cloned.PropertyTypes.First(x => x.Alias.Equals("title")).Id, original.PropertyTypes.First(x => x.Alias.Equals("title")).Id); Assert.AreNotEqual(cloned.PropertyGroups.First(x => x.Name.Equals("Content")).Id, original.PropertyGroups.First(x => x.Name.Equals("Content")).Id); } [Test] public void Can_Copy_ContentType_To_New_Parent_With_Service() { // Arrange var service = ServiceContext.ContentTypeService; var parentContentType1 = MockedContentTypes.CreateSimpleContentType("parent1", "Parent1"); service.Save(parentContentType1); var parentContentType2 = MockedContentTypes.CreateSimpleContentType("parent2", "Parent2", null, true); service.Save(parentContentType2); var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", parentContentType1, true); service.Save(simpleContentType); // Act var clone = service.Copy(simpleContentType, "newAlias", "new alias", parentContentType2); // Assert Assert.That(clone.HasIdentity, Is.True); var clonedContentType = service.GetContentType(clone.Id); var originalContentType = service.GetContentType(simpleContentType.Id); Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent2")), Is.True); Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent1")), Is.False); Assert.AreEqual(clonedContentType.Path, "-1," + parentContentType2.Id + "," + clonedContentType.Id); Assert.AreEqual(clonedContentType.PropertyTypes.Count(), originalContentType.PropertyTypes.Count()); Assert.AreNotEqual(clonedContentType.ParentId, originalContentType.ParentId); Assert.AreEqual(clonedContentType.ParentId, parentContentType2.Id); Assert.AreNotEqual(clonedContentType.Id, originalContentType.Id); Assert.AreNotEqual(clonedContentType.Key, originalContentType.Key); Assert.AreNotEqual(clonedContentType.Path, originalContentType.Path); Assert.AreNotEqual(clonedContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id, originalContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id); Assert.AreNotEqual(clonedContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id, originalContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id); } [Test] public void Cannot_Add_Duplicate_PropertyType_Alias_To_Referenced_Composition() { //Related the second issue in screencast from this post http://issues.umbraco.org/issue/U4-5986 // Arrange var service = ServiceContext.ContentTypeService; var 
parent = MockedContentTypes.CreateSimpleContentType(); service.Save(parent); var child = MockedContentTypes.CreateSimpleContentType("simpleChildPage", "Simple Child Page", parent, true); service.Save(child); var composition = MockedContentTypes.CreateMetaContentType(); service.Save(composition); //Adding Meta-composition to child doc type child.AddContentType(composition); service.Save(child); // Act var duplicatePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var added = composition.AddPropertyType(duplicatePropertyType, "Meta"); // Assert Assert.That(added, Is.True); Assert.Throws<Exception>(() => service.Save(composition)); Assert.DoesNotThrow(() => service.GetContentType("simpleChildPage")); } [Test] public void Cannot_Add_Duplicate_PropertyType_Alias_In_Composition_Graph() { // Arrange var service = ServiceContext.ContentTypeService; var basePage = MockedContentTypes.CreateSimpleContentType("basePage", "Base Page", null, true); service.Save(basePage); var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", basePage); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true); service.Save(advancedPage); var metaComposition = MockedContentTypes.CreateMetaContentType(); service.Save(metaComposition); var seoComposition = MockedContentTypes.CreateSeoContentType(); service.Save(seoComposition); var metaAdded = contentPage.AddContentType(metaComposition); service.Save(contentPage); var seoAdded = advancedPage.AddContentType(seoComposition); service.Save(advancedPage); // Act var duplicatePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var addedToBasePage = basePage.AddPropertyType(duplicatePropertyType, "Content"); var addedToAdvancedPage = advancedPage.AddPropertyType(duplicatePropertyType, "Content"); var addedToMeta = metaComposition.AddPropertyType(duplicatePropertyType, "Meta"); var addedToSeo = seoComposition.AddPropertyType(duplicatePropertyType, "Seo"); // Assert Assert.That(metaAdded, Is.True); Assert.That(seoAdded, Is.True); Assert.That(addedToBasePage, Is.True); Assert.That(addedToAdvancedPage, Is.False); Assert.That(addedToMeta, Is.True); Assert.That(addedToSeo, Is.True); Assert.Throws<Exception>(() => service.Save(basePage)); Assert.Throws<Exception>(() => service.Save(metaComposition)); Assert.Throws<Exception>(() => service.Save(seoComposition)); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); Assert.DoesNotThrow(() => service.GetContentType("meta")); Assert.DoesNotThrow(() => service.GetContentType("seo")); } [Test] public void Cannot_Add_Duplicate_PropertyType_Alias_At_Root_Which_Conflicts_With_Third_Levels_Composition() { /* * BasePage, gets 'Title' added but should not be allowed * -- Content Page * ---- Advanced Page -> Content Meta * Content Meta :: Composition, has 'Title' * * Content Meta has 'Title' PropertyType * Adding 'Title' to BasePage should fail */ // Arrange var service = ServiceContext.ContentTypeService; var basePage = MockedContentTypes.CreateBasicContentType(); service.Save(basePage); var contentPage = 
MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage); service.Save(advancedPage); var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType(); service.Save(contentMetaComposition); // Act var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content"); service.Save(basePage); var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content"); service.Save(contentPage); var compositionAdded = advancedPage.AddContentType(contentMetaComposition); service.Save(advancedPage); //NOTE: It should not be possible to Save 'BasePage' with the Title PropertyType added var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var titleAdded = basePage.AddPropertyType(titlePropertyType, "Content"); // Assert Assert.That(bodyTextAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.That(titleAdded, Is.True); Assert.That(compositionAdded, Is.True); Assert.Throws<Exception>(() => service.Save(basePage)); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); } [Test] public void Cannot_Rename_PropertyType_Alias_On_Composition_Which_Would_Cause_Conflict_In_Other_Composition() { /* * Meta renames alias to 'title' * Seo has 'Title' * BasePage * -- ContentPage * ---- AdvancedPage -> Seo * ------ MoreAdvanedPage -> Meta */ // Arrange var service = ServiceContext.ContentTypeService; var basePage = MockedContentTypes.CreateBasicContentType(); service.Save(basePage); var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage); service.Save(advancedPage); var moreAdvancedPage = MockedContentTypes.CreateBasicContentType("moreAdvancedPage", "More Advanced Page", advancedPage); service.Save(moreAdvancedPage); var seoComposition = MockedContentTypes.CreateSeoContentType(); service.Save(seoComposition); var metaComposition = MockedContentTypes.CreateMetaContentType(); service.Save(metaComposition); // Act var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content"); service.Save(basePage); var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content"); service.Save(contentPage); var 
subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle") { Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var subtitleAdded = advancedPage.AddPropertyType(subtitlePropertyType, "Content"); service.Save(advancedPage); var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var titleAdded = seoComposition.AddPropertyType(titlePropertyType, "Content"); service.Save(seoComposition); var seoCompositionAdded = advancedPage.AddContentType(seoComposition); var metaCompositionAdded = moreAdvancedPage.AddContentType(metaComposition); service.Save(advancedPage); service.Save(moreAdvancedPage); var keywordsPropertyType = metaComposition.PropertyTypes.First(x => x.Alias.Equals("metakeywords")); keywordsPropertyType.Alias = "title"; // Assert Assert.That(bodyTextAdded, Is.True); Assert.That(subtitleAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.That(titleAdded, Is.True); Assert.That(seoCompositionAdded, Is.True); Assert.That(metaCompositionAdded, Is.True); Assert.Throws<Exception>(() => service.Save(metaComposition)); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); Assert.DoesNotThrow(() => service.GetContentType("moreAdvancedPage")); } [Test] public void Can_Add_Additional_Properties_On_Composition_Once_Composition_Has_Been_Saved() { /* * Meta renames alias to 'title' * Seo has 'Title' * BasePage * -- ContentPage * ---- AdvancedPage -> Seo * ------ MoreAdvancedPage -> Meta */ // Arrange var service = ServiceContext.ContentTypeService; var basePage = MockedContentTypes.CreateBasicContentType(); service.Save(basePage); var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage); service.Save(advancedPage); var moreAdvancedPage = MockedContentTypes.CreateBasicContentType("moreAdvancedPage", "More Advanced Page", advancedPage); service.Save(moreAdvancedPage); var seoComposition = MockedContentTypes.CreateSeoContentType(); service.Save(seoComposition); var metaComposition = MockedContentTypes.CreateMetaContentType(); service.Save(metaComposition); // Act var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content"); service.Save(basePage); var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content"); service.Save(contentPage); var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle") { Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var subtitleAdded = advancedPage.AddPropertyType(subtitlePropertyType, "Content"); service.Save(advancedPage); var titlePropertyType = new 
PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var titleAdded = seoComposition.AddPropertyType(titlePropertyType, "Content"); service.Save(seoComposition); var seoCompositionAdded = advancedPage.AddContentType(seoComposition); var metaCompositionAdded = moreAdvancedPage.AddContentType(metaComposition); service.Save(advancedPage); service.Save(moreAdvancedPage); // Assert Assert.That(bodyTextAdded, Is.True); Assert.That(subtitleAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.That(titleAdded, Is.True); Assert.That(seoCompositionAdded, Is.True); Assert.That(metaCompositionAdded, Is.True); var testPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "test") { Name = "Test", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var testAdded = seoComposition.AddPropertyType(testPropertyType, "Content"); service.Save(seoComposition); Assert.That(testAdded, Is.True); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); Assert.DoesNotThrow(() => service.GetContentType("moreAdvancedPage")); } [Test] public void Cannot_Rename_PropertyGroup_On_Child_Avoiding_Conflict_With_Parent_PropertyGroup() { // Arrange var service = ServiceContext.ContentTypeService; var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content"); service.Save(page); var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Content_"); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true, "Details"); service.Save(advancedPage); var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType(); service.Save(contentMetaComposition); // Act var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle") { Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content"); var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content"); service.Save(contentPage); var compositionAdded = contentPage.AddContentType(contentMetaComposition); service.Save(contentPage); //Change the name of the tab on the "root" content type 'page'. 
var propertyGroup = contentPage.PropertyGroups["Content_"]; Assert.Throws<Exception>(() => contentPage.PropertyGroups.Add(new PropertyGroup { Id = propertyGroup.Id, Name = "Content", SortOrder = 0 })); // Assert Assert.That(compositionAdded, Is.True); Assert.That(subtitleAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); } [Test] public void Cannot_Rename_PropertyType_Alias_Causing_Conflicts_With_Parents() { // Arrange var service = ServiceContext.ContentTypeService; var basePage = MockedContentTypes.CreateBasicContentType(); service.Save(basePage); var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage); service.Save(advancedPage); // Act var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var titleAdded = basePage.AddPropertyType(titlePropertyType, "Content"); var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var bodyTextAdded = contentPage.AddPropertyType(bodyTextPropertyType, "Content"); var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle") { Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content"); var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorAdded = advancedPage.AddPropertyType(authorPropertyType, "Content"); service.Save(basePage); service.Save(contentPage); service.Save(advancedPage); //Rename the PropertyType to something that already exists in the Composition - NOTE this should not be allowed and Saving should throw an exception var authorPropertyTypeToRename = advancedPage.PropertyTypes.First(x => x.Alias.Equals("author")); authorPropertyTypeToRename.Alias = "title"; // Assert Assert.That(bodyTextAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.That(titleAdded, Is.True); Assert.That(subtitleAdded, Is.True); Assert.Throws<Exception>(() => service.Save(advancedPage)); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); } [Test] public void Can_Add_PropertyType_Alias_Which_Exists_In_Composition_Outside_Graph() { /* * Meta (Composition) * Content Meta (Composition) has 'Title' -> Meta * BasePage * -- ContentPage gets 'Title' added -> Meta * ---- Advanced Page */ // Arrange var service = ServiceContext.ContentTypeService; var basePage = MockedContentTypes.CreateSimpleContentType("basePage", "Base Page", null, true); service.Save(basePage); var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", basePage, true); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", 
contentPage, true); service.Save(advancedPage); var metaComposition = MockedContentTypes.CreateMetaContentType(); service.Save(metaComposition); var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType(); service.Save(contentMetaComposition); var metaAdded = contentPage.AddContentType(metaComposition); service.Save(contentPage); var metaAddedToComposition = contentMetaComposition.AddContentType(metaComposition); service.Save(contentMetaComposition); // Act var propertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var addedToContentPage = contentPage.AddPropertyType(propertyType, "Content"); // Assert Assert.That(metaAdded, Is.True); Assert.That(metaAddedToComposition, Is.True); Assert.That(addedToContentPage, Is.True); Assert.DoesNotThrow(() => service.Save(contentPage)); } [Test] public void Can_Rename_PropertyGroup_With_Inherited_PropertyGroups() { //Related the first issue in screencast from this post http://issues.umbraco.org/issue/U4-5986 // Arrange var service = ServiceContext.ContentTypeService; var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, false, "Content_"); service.Save(page); var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true); service.Save(contentPage); var composition = MockedContentTypes.CreateMetaContentType(); composition.AddPropertyGroup("Content"); service.Save(composition); //Adding Meta-composition to child doc type contentPage.AddContentType(composition); service.Save(contentPage); // Act var propertyTypeOne = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "testTextbox") { Name = "Test Textbox", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var firstOneAdded = contentPage.AddPropertyType(propertyTypeOne, "Content_"); var propertyTypeTwo = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "anotherTextbox") { Name = "Another Test Textbox", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var secondOneAdded = contentPage.AddPropertyType(propertyTypeTwo, "Content"); service.Save(contentPage); Assert.That(page.PropertyGroups.Contains("Content_"), Is.True); var propertyGroup = page.PropertyGroups["Content_"]; page.PropertyGroups.Add(new PropertyGroup{ Id = propertyGroup.Id, Name = "ContentTab", SortOrder = 0}); service.Save(page); // Assert Assert.That(firstOneAdded, Is.True); Assert.That(secondOneAdded, Is.True); var contentType = service.GetContentType("contentPage"); Assert.That(contentType, Is.Not.Null); var compositionPropertyGroups = contentType.CompositionPropertyGroups; Assert.That(compositionPropertyGroups.Count(x => x.Name.Equals("Content_")), Is.EqualTo(0)); var propertyTypeCount = contentType.PropertyTypes.Count(); var compPropertyTypeCount = contentType.CompositionPropertyTypes.Count(); Assert.That(propertyTypeCount, Is.EqualTo(5)); Assert.That(compPropertyTypeCount, Is.EqualTo(10)); } [Test] public void Can_Rename_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups() { // Arrange var service = ServiceContext.ContentTypeService; var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content_"); service.Save(page); var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Contentx"); 
service.Save(contentPage); var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true, "Contenty"); service.Save(advancedPage); var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType(); service.Save(contentMetaComposition); var compositionAdded = contentPage.AddContentType(contentMetaComposition); service.Save(contentPage); // Act var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle") { Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var bodyTextAdded = contentPage.AddPropertyType(bodyTextPropertyType, "Content_");//Will be added to the parent tab var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");//Will be added to the "Content Meta" composition service.Save(contentPage); var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var descriptionPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "description") { Name = "Description", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var keywordsPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "keywords") { Name = "Keywords", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorAdded = advancedPage.AddPropertyType(authorPropertyType, "Content_");//Will be added to an ancestor tab var descriptionAdded = advancedPage.AddPropertyType(descriptionPropertyType, "Contentx");//Will be added to a parent tab var keywordsAdded = advancedPage.AddPropertyType(keywordsPropertyType, "Content");//Will be added to the "Content Meta" composition service.Save(advancedPage); //Change the name of the tab on the "root" content type 'page'. 
var propertyGroup = page.PropertyGroups["Content_"]; page.PropertyGroups.Add(new PropertyGroup { Id = propertyGroup.Id, Name = "Content", SortOrder = 0 }); service.Save(page); // Assert Assert.That(compositionAdded, Is.True); Assert.That(bodyTextAdded, Is.True); Assert.That(subtitleAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.That(descriptionAdded, Is.True); Assert.That(keywordsAdded, Is.True); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); var advancedPageReloaded = service.GetContentType("advancedPage"); var contentUnderscoreTabExists = advancedPageReloaded.CompositionPropertyGroups.Any(x => x.Name.Equals("Content_")); Assert.That(contentUnderscoreTabExists, Is.False); var numberOfContentTabs = advancedPageReloaded.CompositionPropertyGroups.Count(x => x.Name.Equals("Content")); Assert.That(numberOfContentTabs, Is.EqualTo(4)); } [Test] public void Can_Rename_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups_v2() { // Arrange var service = ServiceContext.ContentTypeService; var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content_"); service.Save(page); var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Content"); service.Save(contentPage); var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType(); service.Save(contentMetaComposition); // Act var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle") { Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var bodyTextAdded = page.AddPropertyType(bodyTextPropertyType, "Content_"); var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content"); var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content_"); service.Save(page); service.Save(contentPage); var compositionAdded = contentPage.AddContentType(contentMetaComposition); service.Save(contentPage); //Change the name of the tab on the "root" content type 'page'. 
var propertyGroup = page.PropertyGroups["Content_"]; page.PropertyGroups.Add(new PropertyGroup { Id = propertyGroup.Id, Name = "Content", SortOrder = 0 }); service.Save(page); // Assert Assert.That(compositionAdded, Is.True); Assert.That(bodyTextAdded, Is.True); Assert.That(subtitleAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); } [Test] public void Can_Remove_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups() { // Arrange var service = ServiceContext.ContentTypeService; var basePage = MockedContentTypes.CreateBasicContentType(); service.Save(basePage); var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage); service.Save(advancedPage); var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType(); service.Save(contentMetaComposition); // Act var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content"); service.Save(basePage); var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content"); service.Save(contentPage); var compositionAdded = contentPage.AddContentType(contentMetaComposition); service.Save(contentPage); basePage.RemovePropertyGroup("Content"); service.Save(basePage); // Assert Assert.That(bodyTextAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.That(compositionAdded, Is.True); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); var contentType = service.GetContentType("contentPage"); var propertyGroup = contentType.PropertyGroups["Content"]; Assert.That(propertyGroup.ParentId.HasValue, Is.False); } [Test] public void Can_Add_PropertyGroup_With_Same_Name_On_Parent_and_Child() { /* * BasePage * - Content Page * -- Advanced Page * Content Meta :: Composition */ // Arrange var service = ServiceContext.ContentTypeService; var basePage = MockedContentTypes.CreateBasicContentType(); service.Save(basePage); var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage); service.Save(contentPage); var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage); service.Save(advancedPage); var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType(); service.Save(contentMetaComposition); // Act var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content"); service.Save(contentPage); var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }; var 
bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content"); service.Save(basePage); var compositionAdded = contentPage.AddContentType(contentMetaComposition); service.Save(contentPage); // Assert Assert.That(bodyTextAdded, Is.True); Assert.That(authorAdded, Is.True); Assert.That(compositionAdded, Is.True); Assert.DoesNotThrow(() => service.GetContentType("contentPage")); Assert.DoesNotThrow(() => service.GetContentType("advancedPage")); var contentType = service.GetContentType("contentPage"); var propertyGroup = contentType.PropertyGroups["Content"]; Assert.That(propertyGroup.ParentId.HasValue, Is.False); var numberOfContentTabs = contentType.CompositionPropertyGroups.Count(x => x.Name.Equals("Content")); Assert.That(numberOfContentTabs, Is.EqualTo(3)); //Ensure that adding a new PropertyType to the "Content"-tab also adds it to the right group var descriptionPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext) { Alias = "description", Name = "Description", Description = "", Mandatory = false, SortOrder = 1,DataTypeDefinitionId = -88 }; var descriptionAdded = contentType.AddPropertyType(descriptionPropertyType, "Content"); service.Save(contentType); Assert.That(descriptionAdded, Is.True); var contentPageReloaded = service.GetContentType("contentPage"); var propertyGroupReloaded = contentPageReloaded.PropertyGroups["Content"]; var hasDescriptionPropertyType = propertyGroupReloaded.PropertyTypes.Contains("description"); Assert.That(hasDescriptionPropertyType, Is.True); Assert.That(propertyGroupReloaded.ParentId.HasValue, Is.False); var descriptionPropertyTypeReloaded = propertyGroupReloaded.PropertyTypes["description"]; Assert.That(descriptionPropertyTypeReloaded.PropertyGroupId.IsValueCreated, Is.False); } private ContentType CreateComponent() { var component = new ContentType(-1) { Alias = "component", Name = "Component", Description = "ContentType used for Component grouping", Icon = ".sprTreeDoc3", Thumbnail = "doc.png", SortOrder = 1, CreatorId = 0, Trashed = false }; var contentCollection = new PropertyTypeCollection(); contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "componentGroup") { Name = "Component Group", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }); component.PropertyGroups.Add(new PropertyGroup(contentCollection) { Name = "Component", SortOrder = 1 }); return component; } private ContentType CreateBannerComponent(ContentType parent) { const string contentTypeAlias = "banner"; var banner = new ContentType(parent, contentTypeAlias) { Alias = contentTypeAlias, Name = "Banner Component", Description = "ContentType used for Banner Component", Icon = ".sprTreeDoc3", Thumbnail = "doc.png", SortOrder = 1, CreatorId = 0, Trashed = false }; var propertyType = new PropertyType("test", DataTypeDatabaseType.Ntext, "bannerName") { Name = "Banner Name", Description = "", Mandatory = false, SortOrder = 2, DataTypeDefinitionId = -88 }; banner.AddPropertyType(propertyType, "Component"); return banner; } private ContentType CreateSite() { var site = new ContentType(-1) { Alias = "site", Name = "Site", Description = "ContentType used for Site inheritence", Icon = ".sprTreeDoc3", Thumbnail = "doc.png", SortOrder = 2, CreatorId = 0, Trashed = false }; var contentCollection = new PropertyTypeCollection(); contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "hostname") { Name = "Hostname", Description = "", Mandatory = false, SortOrder = 1, 
DataTypeDefinitionId = -88 }); site.PropertyGroups.Add(new PropertyGroup(contentCollection) { Name = "Site Settings", SortOrder = 1 }); return site; } private ContentType CreateHomepage(ContentType parent) { const string contentTypeAlias = "homepage"; var contentType = new ContentType(parent, contentTypeAlias) { Alias = contentTypeAlias, Name = "Homepage", Description = "ContentType used for the Homepage", Icon = ".sprTreeDoc3", Thumbnail = "doc.png", SortOrder = 1, CreatorId = 0, Trashed = false }; var contentCollection = new PropertyTypeCollection(); contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88 }); contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, SortOrder = 2, DataTypeDefinitionId = -87 }); contentCollection.Add(new PropertyType("test", DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "Name of the author", Mandatory = false, SortOrder = 3, DataTypeDefinitionId = -88 }); contentType.PropertyGroups.Add(new PropertyGroup(contentCollection) { Name = "Content", SortOrder = 1 }); return contentType; } private IContentType[] CreateContentTypeHierarchy() { //create the master type var masterContentType = MockedContentTypes.CreateSimpleContentType("masterContentType", "MasterContentType"); masterContentType.Key = new Guid("C00CA18E-5A9D-483B-A371-EECE0D89B4AE"); ServiceContext.ContentTypeService.Save(masterContentType); //add the one we just created var list = new List<IContentType> { masterContentType }; for (var i = 0; i < 10; i++) { var contentType = MockedContentTypes.CreateSimpleContentType("childType" + i, "ChildType" + i, //make the last entry in the list, this one's parent list.Last(), true); list.Add(contentType); } return list.ToArray(); } } }
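// Illustrative sketch only (not part of the test fixture above; the namespace and using
// directives reflect the usual Umbraco.Core layout assumed by these tests): the tests show
// that AddPropertyType only reports purely local alias conflicts, while conflicts introduced
// through compositions or ancestors surface as an exception from ContentTypeService.Save.
// TryAddPropertyType is a hypothetical helper name illustrating that guard pattern.
namespace Umbraco.Tests.Services.Sketches
{
    using System;
    using Umbraco.Core.Models;
    using Umbraco.Core.Services;

    internal static class ContentTypeSaveSketch
    {
        public static bool TryAddPropertyType(IContentTypeService service, IContentType contentType,
            PropertyType propertyType, string tabName)
        {
            // AddPropertyType returns false only when the alias already exists on this type itself...
            if (contentType.AddPropertyType(propertyType, tabName) == false)
                return false;

            try
            {
                // ...whereas duplicates elsewhere in the composition graph are rejected on Save.
                service.Save(contentType);
                return true;
            }
            catch (Exception)
            {
                // Save refused the change, e.g. a duplicate PropertyType alias in a referenced composition.
                return false;
            }
        }
    }
}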
using YAF.Lucene.Net.Diagnostics; using YAF.Lucene.Net.Support; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.IO; namespace YAF.Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using AppendingPackedInt64Buffer = YAF.Lucene.Net.Util.Packed.AppendingPackedInt64Buffer; using BytesRef = YAF.Lucene.Net.Util.BytesRef; using IBits = YAF.Lucene.Net.Util.IBits; using MonotonicAppendingInt64Buffer = YAF.Lucene.Net.Util.Packed.MonotonicAppendingInt64Buffer; using PackedInt32s = YAF.Lucene.Net.Util.Packed.PackedInt32s; using TermsEnumIndex = YAF.Lucene.Net.Index.MultiTermsEnum.TermsEnumIndex; using TermsEnumWithSlice = YAF.Lucene.Net.Index.MultiTermsEnum.TermsEnumWithSlice; /// <summary> /// A wrapper for <see cref="CompositeReader"/> providing access to <see cref="DocValues"/>. /// /// <para/><b>NOTE</b>: for multi readers, you'll get better /// performance by gathering the sub readers using /// <see cref="IndexReader.Context"/> to get the /// atomic leaves and then operate per-AtomicReader, /// instead of using this class. /// /// <para/><b>NOTE</b>: this is very costly. /// /// <para/> /// @lucene.experimental /// @lucene.internal /// </summary> public static class MultiDocValues // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable { /// <summary> /// Returns a <see cref="NumericDocValues"/> for a reader's norms (potentially merging on-the-fly). /// <para> /// This is a slow way to access normalization values. 
Instead, access them per-segment /// with <seealso cref="AtomicReader.GetNormValues(string)"/> /// </para> /// </summary> public static NumericDocValues GetNormValues(IndexReader r, string field) { IList<AtomicReaderContext> leaves = r.Leaves; int size = leaves.Count; if (size == 0) { return null; } else if (size == 1) { return leaves[0].AtomicReader.GetNormValues(field); } FieldInfo fi = MultiFields.GetMergedFieldInfos(r).FieldInfo(field); if (fi is null || fi.HasNorms == false) { return null; } bool anyReal = false; NumericDocValues[] values = new NumericDocValues[size]; int[] starts = new int[size + 1]; for (int i = 0; i < size; i++) { AtomicReaderContext context = leaves[i]; NumericDocValues v = context.AtomicReader.GetNormValues(field); if (v is null) { v = DocValues.EMPTY_NUMERIC; } else { anyReal = true; } values[i] = v; starts[i] = context.DocBase; } starts[size] = r.MaxDoc; if (Debugging.AssertsEnabled) Debugging.Assert(anyReal); return new NumericDocValuesAnonymousClass(values, starts); } private class NumericDocValuesAnonymousClass : NumericDocValues { private readonly NumericDocValues[] values; private readonly int[] starts; public NumericDocValuesAnonymousClass(NumericDocValues[] values, int[] starts) { this.values = values; this.starts = starts; } public override long Get(int docID) { int subIndex = ReaderUtil.SubIndex(docID, starts); return values[subIndex].Get(docID - starts[subIndex]); } } /// <summary> /// Returns a <see cref="NumericDocValues"/> for a reader's docvalues (potentially merging on-the-fly) /// <para> /// This is a slow way to access numeric values. Instead, access them per-segment /// with <see cref="AtomicReader.GetNumericDocValues(string)"/> /// </para> /// </summary> public static NumericDocValues GetNumericValues(IndexReader r, string field) { IList<AtomicReaderContext> leaves = r.Leaves; int size = leaves.Count; if (size == 0) { return null; } else if (size == 1) { return leaves[0].AtomicReader.GetNumericDocValues(field); } bool anyReal = false; NumericDocValues[] values = new NumericDocValues[size]; int[] starts = new int[size + 1]; for (int i = 0; i < size; i++) { AtomicReaderContext context = leaves[i]; NumericDocValues v = context.AtomicReader.GetNumericDocValues(field); if (v is null) { v = DocValues.EMPTY_NUMERIC; } else { anyReal = true; } values[i] = v; starts[i] = context.DocBase; } starts[size] = r.MaxDoc; if (!anyReal) { return null; } else { return new NumericDocValuesAnonymousClass2(values, starts); } } private class NumericDocValuesAnonymousClass2 : NumericDocValues { private readonly NumericDocValues[] values; private readonly int[] starts; public NumericDocValuesAnonymousClass2(NumericDocValues[] values, int[] starts) { this.values = values; this.starts = starts; } public override long Get(int docID) { int subIndex = ReaderUtil.SubIndex(docID, starts); return values[subIndex].Get(docID - starts[subIndex]); } } /// <summary> /// Returns a <see cref="IBits"/> for a reader's docsWithField (potentially merging on-the-fly) /// <para> /// This is a slow way to access this bitset. 
Instead, access them per-segment /// with <see cref="AtomicReader.GetDocsWithField(string)"/> /// </para> /// </summary> public static IBits GetDocsWithField(IndexReader r, string field) { IList<AtomicReaderContext> leaves = r.Leaves; int size = leaves.Count; if (size == 0) { return null; } else if (size == 1) { return leaves[0].AtomicReader.GetDocsWithField(field); } bool anyReal = false; bool anyMissing = false; IBits[] values = new IBits[size]; int[] starts = new int[size + 1]; for (int i = 0; i < size; i++) { AtomicReaderContext context = leaves[i]; IBits v = context.AtomicReader.GetDocsWithField(field); if (v is null) { v = new Lucene.Net.Util.Bits.MatchNoBits(context.Reader.MaxDoc); anyMissing = true; } else { anyReal = true; if (v is Lucene.Net.Util.Bits.MatchAllBits == false) { anyMissing = true; } } values[i] = v; starts[i] = context.DocBase; } starts[size] = r.MaxDoc; if (!anyReal) { return null; } else if (!anyMissing) { return new Lucene.Net.Util.Bits.MatchAllBits(r.MaxDoc); } else { return new MultiBits(values, starts, false); } } /// <summary> /// Returns a <see cref="BinaryDocValues"/> for a reader's docvalues (potentially merging on-the-fly) /// <para> /// This is a slow way to access binary values. Instead, access them per-segment /// with <see cref="AtomicReader.GetBinaryDocValues(string)"/> /// </para> /// </summary> public static BinaryDocValues GetBinaryValues(IndexReader r, string field) { IList<AtomicReaderContext> leaves = r.Leaves; int size = leaves.Count; if (size == 0) { return null; } else if (size == 1) { return leaves[0].AtomicReader.GetBinaryDocValues(field); } bool anyReal = false; BinaryDocValues[] values = new BinaryDocValues[size]; int[] starts = new int[size + 1]; for (int i = 0; i < size; i++) { AtomicReaderContext context = leaves[i]; BinaryDocValues v = context.AtomicReader.GetBinaryDocValues(field); if (v is null) { v = DocValues.EMPTY_BINARY; } else { anyReal = true; } values[i] = v; starts[i] = context.DocBase; } starts[size] = r.MaxDoc; if (!anyReal) { return null; } else { return new BinaryDocValuesAnonymousClass(values, starts); } } private class BinaryDocValuesAnonymousClass : BinaryDocValues { private readonly BinaryDocValues[] values; private readonly int[] starts; public BinaryDocValuesAnonymousClass(BinaryDocValues[] values, int[] starts) { this.values = values; this.starts = starts; } public override void Get(int docID, BytesRef result) { int subIndex = ReaderUtil.SubIndex(docID, starts); values[subIndex].Get(docID - starts[subIndex], result); } } /// <summary> /// Returns a <see cref="SortedDocValues"/> for a reader's docvalues (potentially doing extremely slow things). /// <para> /// this is an extremely slow way to access sorted values. 
Instead, access them per-segment /// with <see cref="AtomicReader.GetSortedDocValues(string)"/> /// </para> /// </summary> public static SortedDocValues GetSortedValues(IndexReader r, string field) { IList<AtomicReaderContext> leaves = r.Leaves; int size = leaves.Count; if (size == 0) { return null; } else if (size == 1) { return leaves[0].AtomicReader.GetSortedDocValues(field); } bool anyReal = false; var values = new SortedDocValues[size]; int[] starts = new int[size + 1]; for (int i = 0; i < size; i++) { AtomicReaderContext context = leaves[i]; SortedDocValues v = context.AtomicReader.GetSortedDocValues(field); if (v is null) { v = DocValues.EMPTY_SORTED; } else { anyReal = true; } values[i] = v; starts[i] = context.DocBase; } starts[size] = r.MaxDoc; if (!anyReal) { return null; } else { TermsEnum[] enums = new TermsEnum[values.Length]; for (int i = 0; i < values.Length; i++) { enums[i] = values[i].GetTermsEnum(); } OrdinalMap mapping = new OrdinalMap(r.CoreCacheKey, enums); return new MultiSortedDocValues(values, starts, mapping); } } /// <summary> /// Returns a <see cref="SortedSetDocValues"/> for a reader's docvalues (potentially doing extremely slow things). /// <para> /// This is an extremely slow way to access sorted values. Instead, access them per-segment /// with <see cref="AtomicReader.GetSortedSetDocValues(string)"/> /// </para> /// </summary> public static SortedSetDocValues GetSortedSetValues(IndexReader r, string field) { IList<AtomicReaderContext> leaves = r.Leaves; int size = leaves.Count; if (size == 0) { return null; } else if (size == 1) { return leaves[0].AtomicReader.GetSortedSetDocValues(field); } bool anyReal = false; SortedSetDocValues[] values = new SortedSetDocValues[size]; int[] starts = new int[size + 1]; for (int i = 0; i < size; i++) { AtomicReaderContext context = leaves[i]; SortedSetDocValues v = context.AtomicReader.GetSortedSetDocValues(field); if (v is null) { v = DocValues.EMPTY_SORTED_SET; } else { anyReal = true; } values[i] = v; starts[i] = context.DocBase; } starts[size] = r.MaxDoc; if (!anyReal) { return null; } else { TermsEnum[] enums = new TermsEnum[values.Length]; for (int i = 0; i < values.Length; i++) { enums[i] = values[i].GetTermsEnum(); } OrdinalMap mapping = new OrdinalMap(r.CoreCacheKey, enums); return new MultiSortedSetDocValues(values, starts, mapping); } } /// <summary> /// maps per-segment ordinals to/from global ordinal space </summary> // TODO: use more efficient packed ints structures? // TODO: pull this out? its pretty generic (maps between N ord()-enabled TermsEnums) public class OrdinalMap { // cache key of whoever asked for this awful thing internal readonly object owner; // globalOrd -> (globalOrd - segmentOrd) where segmentOrd is the the ordinal in the first segment that contains this term internal readonly MonotonicAppendingInt64Buffer globalOrdDeltas; // globalOrd -> first segment container internal readonly AppendingPackedInt64Buffer firstSegments; // for every segment, segmentOrd -> (globalOrd - segmentOrd) internal readonly MonotonicAppendingInt64Buffer[] ordDeltas; /// <summary> /// Creates an ordinal map that allows mapping ords to/from a merged /// space from <c>subs</c>. </summary> /// <param name="owner"> a cache key </param> /// <param name="subs"> <see cref="TermsEnum"/>s that support <see cref="TermsEnum.Ord"/>. They need /// not be dense (e.g. can be FilteredTermsEnums). </param> /// <exception cref="IOException"> if an I/O error occurred. 
</exception> public OrdinalMap(object owner, TermsEnum[] subs) { // create the ordinal mappings by pulling a termsenum over each sub's // unique terms, and walking a multitermsenum over those this.owner = owner; globalOrdDeltas = new MonotonicAppendingInt64Buffer(PackedInt32s.COMPACT); firstSegments = new AppendingPackedInt64Buffer(PackedInt32s.COMPACT); ordDeltas = new MonotonicAppendingInt64Buffer[subs.Length]; for (int i = 0; i < ordDeltas.Length; i++) { ordDeltas[i] = new MonotonicAppendingInt64Buffer(); } long[] segmentOrds = new long[subs.Length]; ReaderSlice[] slices = new ReaderSlice[subs.Length]; TermsEnumIndex[] indexes = new TermsEnumIndex[slices.Length]; for (int i = 0; i < slices.Length; i++) { slices[i] = new ReaderSlice(0, 0, i); indexes[i] = new TermsEnumIndex(subs[i], i); } MultiTermsEnum mte = new MultiTermsEnum(slices); mte.Reset(indexes); long globalOrd = 0; while (mte.MoveNext()) { TermsEnumWithSlice[] matches = mte.MatchArray; for (int i = 0; i < mte.MatchCount; i++) { int segmentIndex = matches[i].Index; long segmentOrd = matches[i].Terms.Ord; long delta = globalOrd - segmentOrd; // for each unique term, just mark the first segment index/delta where it occurs if (i == 0) { firstSegments.Add(segmentIndex); globalOrdDeltas.Add(delta); } // for each per-segment ord, map it back to the global term. while (segmentOrds[segmentIndex] <= segmentOrd) { ordDeltas[segmentIndex].Add(delta); segmentOrds[segmentIndex]++; } } globalOrd++; } firstSegments.Freeze(); globalOrdDeltas.Freeze(); for (int i = 0; i < ordDeltas.Length; ++i) { ordDeltas[i].Freeze(); } } /// <summary> /// Given a segment number and segment ordinal, returns /// the corresponding global ordinal. /// </summary> public virtual long GetGlobalOrd(int segmentIndex, long segmentOrd) { return segmentOrd + ordDeltas[segmentIndex].Get(segmentOrd); } /// <summary> /// Given global ordinal, returns the ordinal of the first segment which contains /// this ordinal (the corresponding to the segment return <see cref="GetFirstSegmentNumber(long)"/>). /// </summary> public virtual long GetFirstSegmentOrd(long globalOrd) { return globalOrd - globalOrdDeltas.Get(globalOrd); } /// <summary> /// Given a global ordinal, returns the index of the first /// segment that contains this term. /// </summary> public virtual int GetFirstSegmentNumber(long globalOrd) { return (int)firstSegments.Get(globalOrd); } /// <summary> /// Returns the total number of unique terms in global ord space. /// </summary> public virtual long ValueCount => globalOrdDeltas.Count; /// <summary> /// Returns total byte size used by this ordinal map. 
/// </summary> public virtual long RamBytesUsed() { long size = globalOrdDeltas.RamBytesUsed() + firstSegments.RamBytesUsed(); for (int i = 0; i < ordDeltas.Length; i++) { size += ordDeltas[i].RamBytesUsed(); } return size; } } /// <summary> /// Implements <see cref="SortedDocValues"/> over n subs, using an <see cref="OrdinalMap"/> /// <para/> /// @lucene.internal /// </summary> public class MultiSortedDocValues : SortedDocValues { /// <summary> /// docbase for each leaf: parallel with <see cref="Values"/> </summary> [WritableArray] [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")] public int[] DocStarts => docStarts; private readonly int[] docStarts; /// <summary> /// leaf values </summary> [WritableArray] [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")] public SortedDocValues[] Values => values; private readonly SortedDocValues[] values; /// <summary> /// ordinal map mapping ords from <c>values</c> to global ord space </summary> public OrdinalMap Mapping => mapping; private readonly OrdinalMap mapping; /// <summary> /// Creates a new <see cref="MultiSortedDocValues"/> over <paramref name="values"/> </summary> internal MultiSortedDocValues(SortedDocValues[] values, int[] docStarts, OrdinalMap mapping) { if (Debugging.AssertsEnabled) { Debugging.Assert(values.Length == mapping.ordDeltas.Length); Debugging.Assert(docStarts.Length == values.Length + 1); } this.values = values; this.docStarts = docStarts; this.mapping = mapping; } public override int GetOrd(int docID) { int subIndex = ReaderUtil.SubIndex(docID, docStarts); int segmentOrd = values[subIndex].GetOrd(docID - docStarts[subIndex]); return segmentOrd == -1 ? 
segmentOrd : (int)mapping.GetGlobalOrd(subIndex, segmentOrd); } public override void LookupOrd(int ord, BytesRef result) { int subIndex = mapping.GetFirstSegmentNumber(ord); int segmentOrd = (int)mapping.GetFirstSegmentOrd(ord); values[subIndex].LookupOrd(segmentOrd, result); } public override int ValueCount => (int)mapping.ValueCount; } /// <summary> /// Implements <see cref="MultiSortedSetDocValues"/> over n subs, using an <see cref="OrdinalMap"/> /// <para/> /// @lucene.internal /// </summary> public class MultiSortedSetDocValues : SortedSetDocValues { /// <summary> /// docbase for each leaf: parallel with <see cref="Values"/> </summary> [WritableArray] [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")] public int[] DocStarts => docStarts; private readonly int[] docStarts; /// <summary> /// leaf values </summary> [WritableArray] [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")] public SortedSetDocValues[] Values => values; private readonly SortedSetDocValues[] values; /// <summary> /// ordinal map mapping ords from <c>values</c> to global ord space </summary> public OrdinalMap Mapping => mapping; private readonly OrdinalMap mapping; internal int currentSubIndex; /// <summary> /// Creates a new <see cref="MultiSortedSetDocValues"/> over <paramref name="values"/> </summary> internal MultiSortedSetDocValues(SortedSetDocValues[] values, int[] docStarts, OrdinalMap mapping) { if (Debugging.AssertsEnabled) { Debugging.Assert(values.Length == mapping.ordDeltas.Length); Debugging.Assert(docStarts.Length == values.Length + 1); } this.values = values; this.docStarts = docStarts; this.mapping = mapping; } public override long NextOrd() { long segmentOrd = values[currentSubIndex].NextOrd(); if (segmentOrd == NO_MORE_ORDS) { return segmentOrd; } else { return mapping.GetGlobalOrd(currentSubIndex, segmentOrd); } } public override void SetDocument(int docID) { currentSubIndex = ReaderUtil.SubIndex(docID, docStarts); values[currentSubIndex].SetDocument(docID - docStarts[currentSubIndex]); } public override void LookupOrd(long ord, BytesRef result) { int subIndex = mapping.GetFirstSegmentNumber(ord); long segmentOrd = mapping.GetFirstSegmentOrd(ord); values[subIndex].LookupOrd(segmentOrd, result); } public override long ValueCount => mapping.ValueCount; } } }
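// Illustrative usage sketch only (not part of the Lucene.NET sources above): the doc comments
// in MultiDocValues repeatedly recommend per-segment access over the merged view. Both variants
// below rely only on members that appear in this file (MultiDocValues.GetNumericValues,
// IndexReader.Leaves, AtomicReaderContext.DocBase/AtomicReader); the "Sketches" namespace and
// the SumSlow/SumPerSegment method names are hypothetical.
namespace YAF.Lucene.Net.Index.Sketches
{
    internal static class MultiDocValuesUsageSketch
    {
        // Convenient but costly: one merged NumericDocValues over the whole composite reader.
        public static long SumSlow(IndexReader reader, string field)
        {
            NumericDocValues values = MultiDocValues.GetNumericValues(reader, field);
            if (values is null)
                return 0; // no segment had real values for this field

            long sum = 0;
            for (int docID = 0; docID < reader.MaxDoc; docID++)
                sum += values.Get(docID);
            return sum;
        }

        // Preferred: walk the atomic leaves and address each segment with its own doc IDs.
        public static long SumPerSegment(IndexReader reader, string field)
        {
            long sum = 0;
            foreach (AtomicReaderContext context in reader.Leaves)
            {
                NumericDocValues values = context.AtomicReader.GetNumericDocValues(field);
                if (values is null)
                    continue; // this segment has no numeric doc values for the field

                for (int doc = 0; doc < context.Reader.MaxDoc; doc++)
                    sum += values.Get(doc);
            }
            return sum;
        }
    }
}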
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using IndexReader = Lucene.Net.Index.IndexReader; using Term = Lucene.Net.Index.Term; using TermDocs = Lucene.Net.Index.TermDocs; using TermEnum = Lucene.Net.Index.TermEnum; namespace Lucene.Net.Search { /// <summary> /// /// /// </summary> public class ExtendedFieldCacheImpl : FieldCacheImpl, ExtendedFieldCache { public ExtendedFieldCacheImpl() { InitBlock(); } public class AnonymousClassLongParser : LongParser { public virtual long ParseLong(System.String value_Renamed) { return System.Int64.Parse(value_Renamed); } } public class AnonymousClassDoubleParser : DoubleParser { public virtual double ParseDouble(System.String value_Renamed) { return System.Double.Parse(value_Renamed.Replace(".", System.Globalization.CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator)); } } new internal class AnonymousClassCache : Cache { public AnonymousClassCache(Lucene.Net.Search.FieldCacheImpl enclosingInstance) { InitBlock(enclosingInstance); } private void InitBlock(Lucene.Net.Search.FieldCacheImpl enclosingInstance) { this.enclosingInstance = enclosingInstance; } private Lucene.Net.Search.FieldCacheImpl enclosingInstance; public Lucene.Net.Search.FieldCacheImpl Enclosing_Instance { get { return enclosingInstance; } } protected internal override object CreateValue(IndexReader reader, object entryKey) { Entry entry = (Entry)entryKey; System.String field = entry.field; LongParser parser = (LongParser)entry.custom; long[] retArray = new long[reader.MaxDoc()]; TermDocs termDocs = reader.TermDocs(); TermEnum termEnum = reader.Terms(new Term(field)); try { do { Term term = termEnum.Term(); if (term == null || (object)term.Field() != (object)field) break; long termval = parser.ParseLong(term.Text()); termDocs.Seek(termEnum); while (termDocs.Next()) { retArray[termDocs.Doc()] = termval; } } while (termEnum.Next()); } finally { termDocs.Close(); termEnum.Close(); } return retArray; } } new internal class AnonymousClassCache1 : Cache { public AnonymousClassCache1(Lucene.Net.Search.FieldCacheImpl enclosingInstance) { InitBlock(enclosingInstance); } private void InitBlock(Lucene.Net.Search.FieldCacheImpl enclosingInstance) { this.enclosingInstance = enclosingInstance; } private Lucene.Net.Search.FieldCacheImpl enclosingInstance; public Lucene.Net.Search.FieldCacheImpl Enclosing_Instance { get { return enclosingInstance; } } protected internal override object CreateValue(IndexReader reader, object entryKey) { Entry entry = (Entry)entryKey; System.String field = entry.field; DoubleParser parser = (DoubleParser)entry.custom; double[] retArray = new double[reader.MaxDoc()]; TermDocs termDocs = reader.TermDocs(); TermEnum termEnum = reader.Terms(new Term(field)); try { do { Term term = termEnum.Term(); if (term == null || 
(object)term.Field() != (object)field) break; double termval = parser.ParseDouble(term.Text()); termDocs.Seek(termEnum); while (termDocs.Next()) { retArray[termDocs.Doc()] = termval; } } while (termEnum.Next()); } finally { termDocs.Close(); termEnum.Close(); } return retArray; } } new internal class AnonymousClassCache2 : Cache { public AnonymousClassCache2(Lucene.Net.Search.FieldCacheImpl enclosingInstance) { InitBlock(enclosingInstance); } private void InitBlock(Lucene.Net.Search.FieldCacheImpl enclosingInstance) { this.enclosingInstance = enclosingInstance; } private Lucene.Net.Search.FieldCacheImpl enclosingInstance; public Lucene.Net.Search.FieldCacheImpl Enclosing_Instance { get { return enclosingInstance; } } protected internal override object CreateValue(IndexReader reader, object fieldKey) { System.String field = String.Intern(((System.String)fieldKey)); TermEnum enumerator = reader.Terms(new Term(field)); try { Term term = enumerator.Term(); if (term == null) { throw new System.SystemException("no terms in field " + field + " - cannot determine sort type"); } object ret = null; if ((object)term.Field() == (object)field) { System.String termtext = term.Text().Trim(); /** * Java 1.4 level code: if (pIntegers.matcher(termtext).matches()) return IntegerSortedHitQueue.comparator (reader, enumerator, field); else if (pFloats.matcher(termtext).matches()) return FloatSortedHitQueue.comparator (reader, enumerator, field); */ // Java 1.3 level code: try { int parsedIntValue; long parsedLongValue; if (int.TryParse(termtext, out parsedIntValue)) { ret = Enclosing_Instance.GetInts(reader, field); } else if (long.TryParse(termtext, out parsedLongValue)) { ret = ((ExtendedFieldCacheImpl)Enclosing_Instance).GetLongs(reader, field); } else { float f = 0.0f; if (SupportClass.Single.TryParse(termtext, out f)) { ret = Enclosing_Instance.GetFloats(reader, field); } else { ret = Enclosing_Instance.GetStringIndex(reader, field); } } } catch (System.Exception) { ret = Enclosing_Instance.GetStringIndex(reader, field); } } else { throw new System.SystemException("field \"" + field + "\" does not appear to be indexed"); } return ret; } finally { enumerator.Close(); } } } private void InitBlock() { longsCache = new AnonymousClassCache(this); doublesCache = new AnonymousClassCache1(this); autoCache = new AnonymousClassCache2(this); } private static readonly LongParser LONG_PARSER; private static readonly DoubleParser DOUBLE_PARSER; public virtual long[] GetLongs(IndexReader reader, System.String field) { return GetLongs(reader, field, LONG_PARSER); } // inherit javadocs public virtual long[] GetLongs(IndexReader reader, System.String field, LongParser parser) { return (long[])longsCache.Get(reader, new Entry(field, parser)); } internal Cache longsCache; // inherit javadocs public virtual double[] GetDoubles(IndexReader reader, System.String field) { return GetDoubles(reader, field, DOUBLE_PARSER); } // inherit javadocs public virtual double[] GetDoubles(IndexReader reader, System.String field, DoubleParser parser) { return (double[])doublesCache.Get(reader, new Entry(field, parser)); } internal Cache doublesCache; // inherit javadocs public override object GetAuto(IndexReader reader, System.String field) { return autoCache.Get(reader, field); } new internal Cache autoCache; static ExtendedFieldCacheImpl() { LONG_PARSER = new AnonymousClassLongParser(); DOUBLE_PARSER = new AnonymousClassDoubleParser(); } } }
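// Illustrative sketch only, written as if added alongside the AnonymousClass parsers above
// (same nesting inside ExtendedFieldCacheImpl) so that LongParser and DoubleParser resolve
// exactly as they do there: because GetLongs/GetDoubles accept caller-supplied parsers, a
// consumer that indexed numbers in an invariant-culture format could plug in its own
// implementations instead of the culture-sensitive defaults. The class names and the field
// names in the usage comment are hypothetical.
//
//  public class InvariantLongParser : LongParser
//  {
//      public virtual long ParseLong(System.String value)
//      {
//          // Parse with the invariant culture so "." is always the decimal-free integer form.
//          return long.Parse(value, System.Globalization.CultureInfo.InvariantCulture);
//      }
//  }
//
//  public class InvariantDoubleParser : DoubleParser
//  {
//      public virtual double ParseDouble(System.String value)
//      {
//          // Avoid the Replace(".", ...) dance used above by parsing with the invariant culture.
//          return double.Parse(value, System.Globalization.CultureInfo.InvariantCulture);
//      }
//  }
//
//  // Usage:
//  //   long[] longs = extendedFieldCache.GetLongs(reader, "timestamp", new InvariantLongParser());
//  //   double[] doubles = extendedFieldCache.GetDoubles(reader, "price", new InvariantDoubleParser());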
namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using IBits = Lucene.Net.Util.IBits; using BytesRef = Lucene.Net.Util.BytesRef; /// <summary> /// This class contains utility methods and constants for <see cref="DocValues"/> /// </summary> public sealed class DocValues { /* no instantiation */ private DocValues() { } /// <summary> /// An empty <see cref="BinaryDocValues"/> which returns <see cref="BytesRef.EMPTY_BYTES"/> for every document /// </summary> public static readonly BinaryDocValues EMPTY_BINARY = new BinaryDocValuesAnonymousInnerClassHelper(); private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues { public BinaryDocValuesAnonymousInnerClassHelper() { } public override void Get(int docID, BytesRef result) { result.Bytes = BytesRef.EMPTY_BYTES; result.Offset = 0; result.Length = 0; } } /// <summary> /// An empty <see cref="NumericDocValues"/> which returns zero for every document /// </summary> public static readonly NumericDocValues EMPTY_NUMERIC = new NumericDocValuesAnonymousInnerClassHelper(); private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues { public NumericDocValuesAnonymousInnerClassHelper() { } public override long Get(int docID) { return 0; } } /// <summary> /// An empty <see cref="SortedDocValues"/> which returns <see cref="BytesRef.EMPTY_BYTES"/> for every document /// </summary> public static readonly SortedDocValues EMPTY_SORTED = new SortedDocValuesAnonymousInnerClassHelper(); private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues { public SortedDocValuesAnonymousInnerClassHelper() { } public override int GetOrd(int docID) { return -1; } public override void LookupOrd(int ord, BytesRef result) { result.Bytes = BytesRef.EMPTY_BYTES; result.Offset = 0; result.Length = 0; } public override int ValueCount { get { return 0; } } } /// <summary> /// An empty <see cref="SortedDocValues"/> which returns <see cref="SortedSetDocValues.NO_MORE_ORDS"/> for every document /// </summary> public static readonly SortedSetDocValues EMPTY_SORTED_SET = new RandomAccessOrdsAnonymousInnerClassHelper(); private class RandomAccessOrdsAnonymousInnerClassHelper : RandomAccessOrds { public RandomAccessOrdsAnonymousInnerClassHelper() { } public override long NextOrd() { return NO_MORE_ORDS; } public override void SetDocument(int docID) { } public override void LookupOrd(long ord, BytesRef result) { throw new System.IndexOutOfRangeException(); } public override long ValueCount { get { return 0; } } public override long OrdAt(int index) { throw new System.IndexOutOfRangeException(); } public override int Cardinality() { return 0; } } /// <summary> /// Returns a multi-valued view over the provided <see cref="SortedDocValues"/> /// </summary> public static SortedSetDocValues 
Singleton(SortedDocValues dv) { return new SingletonSortedSetDocValues(dv); } /// <summary> /// Returns a single-valued view of the <see cref="SortedSetDocValues"/>, if it was previously /// wrapped with <see cref="Singleton"/>, or <c>null</c>. /// </summary> public static SortedDocValues UnwrapSingleton(SortedSetDocValues dv) { if (dv is SingletonSortedSetDocValues) { return ((SingletonSortedSetDocValues)dv).SortedDocValues; } else { return null; } } /// <summary> /// Returns a <see cref="IBits"/> representing all documents from <paramref name="dv"/> that have a value. /// </summary> public static IBits DocsWithValue(SortedDocValues dv, int maxDoc) { return new BitsAnonymousInnerClassHelper(dv, maxDoc); } private class BitsAnonymousInnerClassHelper : IBits { private Lucene.Net.Index.SortedDocValues dv; private int maxDoc; public BitsAnonymousInnerClassHelper(Lucene.Net.Index.SortedDocValues dv, int maxDoc) { this.dv = dv; this.maxDoc = maxDoc; } public virtual bool Get(int index) { return dv.GetOrd(index) >= 0; } public virtual int Length { get { return maxDoc; } } } /// <summary> /// Returns a <see cref="IBits"/> representing all documents from <paramref name="dv"/> that have a value. /// </summary> public static IBits DocsWithValue(SortedSetDocValues dv, int maxDoc) { return new BitsAnonymousInnerClassHelper2(dv, maxDoc); } private class BitsAnonymousInnerClassHelper2 : IBits { private Lucene.Net.Index.SortedSetDocValues dv; private int maxDoc; public BitsAnonymousInnerClassHelper2(Lucene.Net.Index.SortedSetDocValues dv, int maxDoc) { this.dv = dv; this.maxDoc = maxDoc; } public virtual bool Get(int index) { dv.SetDocument(index); return dv.NextOrd() != SortedSetDocValues.NO_MORE_ORDS; } public virtual int Length { get { return maxDoc; } } } } }
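// ---------------------------------------------------------------------------
// Usage sketch for the DocValues helpers above (uses only the Singleton,
// UnwrapSingleton and DocsWithValue members defined in this file; `sorted`
// stands in for a real per-segment SortedDocValues instance obtained
// elsewhere).
// ---------------------------------------------------------------------------
namespace Lucene.Net.Index.Sketches
{
    using IBits = Lucene.Net.Util.IBits;

    internal static class DocValuesUsageSketch
    {
        internal static bool FirstDocHasValue(SortedDocValues sorted, int maxDoc)
        {
            // Expose a single-valued field through the multi-valued interface...
            SortedSetDocValues multi = DocValues.Singleton(sorted);

            // ...and get the original view back (null if `multi` was not
            // produced by Singleton).
            SortedDocValues unwrapped = DocValues.UnwrapSingleton(multi);

            // "Has a value" bitset: true wherever GetOrd(doc) >= 0.
            IBits docsWithValue = DocValues.DocsWithValue(unwrapped ?? sorted, maxDoc);
            return maxDoc > 0 && docsWithValue.Get(0);
        }
    }
}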
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Linq; using Microsoft.CodeAnalysis.CSharp; using Microsoft.CodeAnalysis.CSharp.Diagnostics.RemoveUnnecessaryImports; using Microsoft.CodeAnalysis.CSharp.Diagnostics.SimplifyTypeNames; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Editor.UnitTests.Squiggles; using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces; using Microsoft.CodeAnalysis.Text; using Microsoft.VisualStudio.Text.Adornments; using Microsoft.VisualStudio.Text.Tagging; using Roslyn.Test.Utilities; using Roslyn.Utilities; using Xunit; namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Squiggles { public class ErrorSquiggleProducerTests : AbstractSquiggleProducerTests { [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)] public void ErrorTagGeneratedForError() { var spans = GetErrorSpans("class C {"); Assert.Equal(1, spans.Count()); var firstSpan = spans.First(); Assert.Equal(PredefinedErrorTypeNames.SyntaxError, firstSpan.Tag.ErrorType); } [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)] public void ErrorTagGeneratedForWarning() { var spans = GetErrorSpans("class C { long x = 5l; }"); Assert.Equal(1, spans.Count()); Assert.Equal(PredefinedErrorTypeNames.Warning, spans.First().Tag.ErrorType); } [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)] public void ErrorTagGeneratedForWarningAsError() { var workspaceXml = @"<Workspace> <Project Language=""C#"" CommonReferences=""true""> <CompilationOptions ReportDiagnostic = ""Error"" /> <Document FilePath = ""Test.cs"" > class Program { void Test() { int a = 5; } } </Document> </Project> </Workspace>"; using (var workspace = TestWorkspaceFactory.CreateWorkspace(workspaceXml)) { var spans = GetErrorSpans(workspace); Assert.Equal(1, spans.Count()); Assert.Equal(PredefinedErrorTypeNames.SyntaxError, spans.First().Tag.ErrorType); } } [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)] public void SuggestionTagsForUnnecessaryCode() { var workspaceXml = @"<Workspace> <Project Language=""C#"" CommonReferences=""true""> <Document FilePath = ""Test.cs"" > // System is used - rest are unused. using System.Collections; using System; using System.Diagnostics; using System.Collections.Generic; class Program { void Test() { Int32 x = 2; // Int32 can be simplified. 
x += 1; } } </Document> </Project> </Workspace>"; using (var workspace = TestWorkspaceFactory.CreateWorkspace(workspaceXml)) { var analyzerMap = new Dictionary<string, DiagnosticAnalyzer[]> { { LanguageNames.CSharp, new DiagnosticAnalyzer[] { new CSharpSimplifyTypeNamesDiagnosticAnalyzer(), new CSharpRemoveUnnecessaryImportsDiagnosticAnalyzer() } } }; var spans = GetErrorSpans(workspace, analyzerMap) .OrderBy(s => s.Span.Span.Start).ToImmutableArray(); Assert.Equal(3, spans.Length); var first = spans[0]; var second = spans[1]; var third = spans[2]; Assert.Equal(PredefinedErrorTypeNames.Suggestion, first.Tag.ErrorType); Assert.Equal(CSharpFeaturesResources.RemoveUnnecessaryUsingsDiagnosticTitle, first.Tag.ToolTipContent); Assert.Equal(40, first.Span.Start); Assert.Equal(25, first.Span.Length); Assert.Equal(PredefinedErrorTypeNames.Suggestion, second.Tag.ErrorType); Assert.Equal(CSharpFeaturesResources.RemoveUnnecessaryUsingsDiagnosticTitle, second.Tag.ToolTipContent); Assert.Equal(82, second.Span.Start); Assert.Equal(60, second.Span.Length); Assert.Equal(PredefinedErrorTypeNames.Suggestion, third.Tag.ErrorType); Assert.Equal(WorkspacesResources.NameCanBeSimplified, third.Tag.ToolTipContent); Assert.Equal(196, third.Span.Start); Assert.Equal(5, third.Span.Length); } } [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)] public void ErrorDoesNotCrashPastEOF() { var spans = GetErrorSpans("class C { int x ="); Assert.Equal(3, spans.Count()); } [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)] public void SemanticErrorReported() { var spans = GetErrorSpans("class C : Bar { }"); Assert.Equal(1, spans.Count()); var firstSpan = spans.First(); Assert.Equal(PredefinedErrorTypeNames.SyntaxError, firstSpan.Tag.ErrorType); Assert.Contains("Bar", (string)firstSpan.Tag.ToolTipContent, StringComparison.Ordinal); } [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)] public void BuildErrorZeroLengthSpan() { var workspaceXml = @"<Workspace> <Project Language=""C#"" CommonReferences=""true""> <Document FilePath = ""Test.cs"" > class Test { } </Document> </Project> </Workspace>"; using (var workspace = TestWorkspaceFactory.CreateWorkspace(workspaceXml)) { var document = workspace.Documents.First(); var updateArgs = new DiagnosticsUpdatedArgs( new object(), workspace, workspace.CurrentSolution, document.Project.Id, document.Id, ImmutableArray.Create( CreateDiagnosticData(workspace, document, new TextSpan(0, 0)), CreateDiagnosticData(workspace, document, new TextSpan(0, 1)))); var spans = GetErrorsFromUpdateSource(workspace, document, updateArgs); Assert.Equal(1, spans.Count()); var first = spans.First(); Assert.Equal(1, first.Span.Span.Length); } } [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)] public void LiveErrorZeroLengthSpan() { var workspaceXml = @"<Workspace> <Project Language=""C#"" CommonReferences=""true""> <Document FilePath = ""Test.cs"" > class Test { } </Document> </Project> </Workspace>"; using (var workspace = TestWorkspaceFactory.CreateWorkspace(workspaceXml)) { var document = workspace.Documents.First(); var updateArgs = new DiagnosticsUpdatedArgs( new LiveId(), workspace, workspace.CurrentSolution, document.Project.Id, document.Id, ImmutableArray.Create( CreateDiagnosticData(workspace, document, new TextSpan(0, 0)), CreateDiagnosticData(workspace, document, new TextSpan(0, 1)))); var spans = GetErrorsFromUpdateSource(workspace, document, updateArgs); Assert.Equal(2, spans.Count()); var first = spans.First(); var second = spans.Last(); 
                Assert.Equal(1, first.Span.Span.Length);
                Assert.Equal(1, second.Span.Span.Length);
            }
        }

        private class LiveId : ISupportLiveUpdate
        {
            public LiveId()
            {
            }
        }

        private static IEnumerable<ITagSpan<IErrorTag>> GetErrorSpans(params string[] content)
        {
            using (var workspace = CSharpWorkspaceFactory.CreateWorkspaceFromLines(content))
            {
                return GetErrorSpans(workspace);
            }
        }
    }
}
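// ---------------------------------------------------------------------------
// Sketch of one more test in the same style (hypothetical addition, shown as a
// comment because it would have to live inside ErrorSquiggleProducerTests):
// syntactically and semantically clean code should produce no squiggle tags.
//
//     [Fact, Trait(Traits.Feature, Traits.Features.ErrorSquiggles)]
//     public void NoTagsGeneratedForValidCode()
//     {
//         var spans = GetErrorSpans("class C { }");
//         Assert.Empty(spans);
//     }
// ---------------------------------------------------------------------------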
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // This file defines an internal class used to throw exceptions in BCL code. // The main purpose is to reduce code size. // // The old way to throw an exception generates quite a lot IL code and assembly code. // Following is an example: // C# source // throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key); // IL code: // IL_0003: ldstr "key" // IL_0008: ldstr "ArgumentNull_Key" // IL_000d: call string System.Environment::GetResourceString(string) // IL_0012: newobj instance void System.ArgumentNullException::.ctor(string,string) // IL_0017: throw // which is 21bytes in IL. // // So we want to get rid of the ldstr and call to Environment.GetResource in IL. // In order to do that, I created two enums: ExceptionResource, ExceptionArgument to represent the // argument name and resource name in a small integer. The source code will be changed to // ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key, ExceptionResource.ArgumentNull_Key); // // The IL code will be 7 bytes. // IL_0008: ldc.i4.4 // IL_0009: ldc.i4.4 // IL_000a: call void System.ThrowHelper::ThrowArgumentNullException(valuetype System.ExceptionArgument) // IL_000f: ldarg.0 // // This will also reduce the Jitted code size a lot. // // It is very important we do this for generic classes because we can easily generate the same code // multiple times for different instantiation. // using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Runtime.Serialization; using System.Diagnostics; using System.Diagnostics.Contracts; namespace System { [Pure] internal static class ThrowHelper { internal static void ThrowArrayTypeMismatchException() { throw new ArrayTypeMismatchException(); } internal static void ThrowInvalidTypeWithPointersNotSupported(Type targetType) { throw new ArgumentException(SR.Format(SR.Argument_InvalidTypeWithPointersNotSupported, targetType)); } internal static void ThrowIndexOutOfRangeException() { throw new IndexOutOfRangeException(); } internal static void ThrowArgumentOutOfRangeException() { throw new ArgumentOutOfRangeException(); } internal static void ThrowArgumentException_DestinationTooShort() { throw new ArgumentException(SR.Argument_DestinationTooShort); } internal static void ThrowArgumentOutOfRange_IndexException() { throw GetArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index); } internal static void ThrowIndexArgumentOutOfRange_NeedNonNegNumException() { throw GetArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum); } internal static void ThrowLengthArgumentOutOfRange_ArgumentOutOfRange_NeedNonNegNum() { throw GetArgumentOutOfRangeException(ExceptionArgument.length, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum); } internal static void ThrowStartIndexArgumentOutOfRange_ArgumentOutOfRange_Index() { throw GetArgumentOutOfRangeException(ExceptionArgument.startIndex, ExceptionResource.ArgumentOutOfRange_Index); } internal static void ThrowCountArgumentOutOfRange_ArgumentOutOfRange_Count() { throw GetArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_Count); } internal static void ThrowWrongKeyTypeArgumentException(object key, Type targetType) { throw GetWrongKeyTypeArgumentException(key, targetType); } internal static void 
ThrowWrongValueTypeArgumentException(object value, Type targetType) { throw GetWrongValueTypeArgumentException(value, targetType); } private static ArgumentException GetAddingDuplicateWithKeyArgumentException(object key) { return new ArgumentException(SR.Format(SR.Argument_AddingDuplicateWithKey, key)); } internal static void ThrowAddingDuplicateWithKeyArgumentException(object key) { throw GetAddingDuplicateWithKeyArgumentException(key); } internal static void ThrowKeyNotFoundException() { throw new KeyNotFoundException(); } internal static void ThrowArgumentException(ExceptionResource resource) { throw GetArgumentException(resource); } internal static void ThrowArgumentException(ExceptionResource resource, ExceptionArgument argument) { throw GetArgumentException(resource, argument); } private static ArgumentNullException GetArgumentNullException(ExceptionArgument argument) { return new ArgumentNullException(GetArgumentName(argument)); } internal static void ThrowArgumentNullException(ExceptionArgument argument) { throw GetArgumentNullException(argument); } internal static void ThrowArgumentNullException(ExceptionResource resource) { throw new ArgumentNullException(GetResourceString(resource)); } internal static void ThrowArgumentNullException(ExceptionArgument argument, ExceptionResource resource) { throw new ArgumentNullException(GetArgumentName(argument), GetResourceString(resource)); } internal static void ThrowArgumentOutOfRangeException(ExceptionArgument argument) { throw new ArgumentOutOfRangeException(GetArgumentName(argument)); } internal static void ThrowArgumentOutOfRangeException(ExceptionArgument argument, ExceptionResource resource) { throw GetArgumentOutOfRangeException(argument, resource); } internal static void ThrowArgumentOutOfRangeException(ExceptionArgument argument, int paramNumber, ExceptionResource resource) { throw GetArgumentOutOfRangeException(argument, paramNumber, resource); } internal static void ThrowInvalidOperationException(ExceptionResource resource) { throw GetInvalidOperationException(resource); } internal static void ThrowInvalidOperationException(ExceptionResource resource, Exception e) { throw new InvalidOperationException(GetResourceString(resource), e); } internal static void ThrowSerializationException(ExceptionResource resource) { throw new SerializationException(GetResourceString(resource)); } internal static void ThrowSecurityException(ExceptionResource resource) { throw new System.Security.SecurityException(GetResourceString(resource)); } internal static void ThrowRankException(ExceptionResource resource) { throw new RankException(GetResourceString(resource)); } internal static void ThrowNotSupportedException(ExceptionResource resource) { throw new NotSupportedException(GetResourceString(resource)); } internal static void ThrowUnauthorizedAccessException(ExceptionResource resource) { throw new UnauthorizedAccessException(GetResourceString(resource)); } internal static void ThrowObjectDisposedException(string objectName, ExceptionResource resource) { throw new ObjectDisposedException(objectName, GetResourceString(resource)); } internal static void ThrowObjectDisposedException(ExceptionResource resource) { throw new ObjectDisposedException(null, GetResourceString(resource)); } internal static void ThrowNotSupportedException() { throw new NotSupportedException(); } internal static void ThrowAggregateException(List<Exception> exceptions) { throw new AggregateException(exceptions); } internal static void ThrowOutOfMemoryException() { throw new 
OutOfMemoryException(); } internal static void ThrowArgumentException_Argument_InvalidArrayType() { throw GetArgumentException(ExceptionResource.Argument_InvalidArrayType); } internal static void ThrowInvalidOperationException_InvalidOperation_EnumNotStarted() { throw GetInvalidOperationException(ExceptionResource.InvalidOperation_EnumNotStarted); } internal static void ThrowInvalidOperationException_InvalidOperation_EnumEnded() { throw GetInvalidOperationException(ExceptionResource.InvalidOperation_EnumEnded); } internal static void ThrowInvalidOperationException_EnumCurrent(int index) { throw GetInvalidOperationException_EnumCurrent(index); } internal static void ThrowInvalidOperationException_InvalidOperation_EnumFailedVersion() { throw GetInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion); } internal static void ThrowInvalidOperationException_InvalidOperation_EnumOpCantHappen() { throw GetInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen); } internal static void ThrowArraySegmentCtorValidationFailedExceptions(Array array, int offset, int count) { throw GetArraySegmentCtorValidationFailedException(array, offset, count); } private static Exception GetArraySegmentCtorValidationFailedException(Array array, int offset, int count) { if (array == null) return GetArgumentNullException(ExceptionArgument.array); if (offset < 0) return GetArgumentOutOfRangeException(ExceptionArgument.offset, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum); if (count < 0) return GetArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum); Debug.Assert(array.Length - offset < count); return GetArgumentException(ExceptionResource.Argument_InvalidOffLen); } private static ArgumentException GetArgumentException(ExceptionResource resource) { return new ArgumentException(GetResourceString(resource)); } internal static InvalidOperationException GetInvalidOperationException(ExceptionResource resource) { return new InvalidOperationException(GetResourceString(resource)); } private static ArgumentException GetWrongKeyTypeArgumentException(object key, Type targetType) { return new ArgumentException(SR.Format(SR.Arg_WrongType, key, targetType), nameof(key)); } private static ArgumentException GetWrongValueTypeArgumentException(object value, Type targetType) { return new ArgumentException(SR.Format(SR.Arg_WrongType, value, targetType), nameof(value)); } internal static ArgumentOutOfRangeException GetArgumentOutOfRangeException(ExceptionArgument argument, ExceptionResource resource) { return new ArgumentOutOfRangeException(GetArgumentName(argument), GetResourceString(resource)); } private static ArgumentException GetArgumentException(ExceptionResource resource, ExceptionArgument argument) { return new ArgumentException(GetResourceString(resource), GetArgumentName(argument)); } private static ArgumentOutOfRangeException GetArgumentOutOfRangeException(ExceptionArgument argument, int paramNumber, ExceptionResource resource) { return new ArgumentOutOfRangeException(GetArgumentName(argument) + "[" + paramNumber.ToString() + "]", GetResourceString(resource)); } private static InvalidOperationException GetInvalidOperationException_EnumCurrent(int index) { return GetInvalidOperationException( index < 0 ? ExceptionResource.InvalidOperation_EnumNotStarted : ExceptionResource.InvalidOperation_EnumEnded); } // Allow nulls for reference types and Nullable<U>, but not for value types. 
// Aggressively inline so the jit evaluates the if in place and either drops the call altogether // Or just leaves null test and call to the Non-returning ThrowHelper.ThrowArgumentNullException [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void IfNullAndNullsAreIllegalThenThrow<T>(object value, ExceptionArgument argName) { // Note that default(T) is not equal to null for value types except when T is Nullable<U>. if (!(default(T) == null) && value == null) ThrowHelper.ThrowArgumentNullException(argName); } // This function will convert an ExceptionArgument enum value to the argument name string. [MethodImpl(MethodImplOptions.NoInlining)] private static string GetArgumentName(ExceptionArgument argument) { Debug.Assert(Enum.IsDefined(typeof(ExceptionArgument), argument), "The enum value is not defined, please check the ExceptionArgument Enum."); return argument.ToString(); } // This function will convert an ExceptionResource enum value to the resource string. [MethodImpl(MethodImplOptions.NoInlining)] private static string GetResourceString(ExceptionResource resource) { Debug.Assert(Enum.IsDefined(typeof(ExceptionResource), resource), "The enum value is not defined, please check the ExceptionResource Enum."); return SR.GetResourceString(resource.ToString()); } } // // The convention for this enum is using the argument name as the enum name // internal enum ExceptionArgument { obj, dictionary, array, info, key, collection, list, match, converter, capacity, index, startIndex, value, count, arrayIndex, name, item, options, view, sourceBytesToCopy, action, comparison, offset, newSize, elementType, length, length1, length2, length3, lengths, len, lowerBounds, sourceArray, destinationArray, sourceIndex, destinationIndex, indices, index1, index2, index3, other, comparer, endIndex, keys, creationOptions, timeout, tasks, scheduler, continuationFunction, millisecondsTimeout, millisecondsDelay, function, exceptions, exception, cancellationToken, delay, asyncResult, endMethod, endFunction, beginMethod, continuationOptions, continuationAction, concurrencyLevel, text, callBack, type, stateMachine, pHandle, values, task, s, keyValuePair, input, ownedMemory } // // The convention for this enum is using the resource name as the enum name // internal enum ExceptionResource { Argument_ImplementIComparable, Argument_InvalidType, Argument_InvalidArgumentForComparison, Argument_InvalidRegistryKeyPermissionCheck, ArgumentOutOfRange_NeedNonNegNum, Arg_ArrayPlusOffTooSmall, Arg_NonZeroLowerBound, Arg_RankMultiDimNotSupported, Arg_RegKeyDelHive, Arg_RegKeyStrLenBug, Arg_RegSetStrArrNull, Arg_RegSetMismatchedKind, Arg_RegSubKeyAbsent, Arg_RegSubKeyValueAbsent, Argument_AddingDuplicate, Serialization_InvalidOnDeser, Serialization_MissingKeys, Serialization_NullKey, Argument_InvalidArrayType, NotSupported_KeyCollectionSet, NotSupported_ValueCollectionSet, ArgumentOutOfRange_SmallCapacity, ArgumentOutOfRange_Index, Argument_InvalidOffLen, Argument_ItemNotExist, ArgumentOutOfRange_Count, ArgumentOutOfRange_InvalidThreshold, ArgumentOutOfRange_ListInsert, NotSupported_ReadOnlyCollection, InvalidOperation_CannotRemoveFromStackOrQueue, InvalidOperation_EmptyQueue, InvalidOperation_EnumOpCantHappen, InvalidOperation_EnumFailedVersion, InvalidOperation_EmptyStack, ArgumentOutOfRange_BiggerThanCollection, InvalidOperation_EnumNotStarted, InvalidOperation_EnumEnded, NotSupported_SortedListNestedWrite, InvalidOperation_NoValue, InvalidOperation_RegRemoveSubKey, Security_RegistryPermission, 
UnauthorizedAccess_RegistryNoWrite, ObjectDisposed_RegKeyClosed, NotSupported_InComparableType, Argument_InvalidRegistryOptionsCheck, Argument_InvalidRegistryViewCheck, InvalidOperation_NullArray, Arg_MustBeType, Arg_NeedAtLeast1Rank, ArgumentOutOfRange_HugeArrayNotSupported, Arg_RanksAndBounds, Arg_RankIndices, Arg_Need1DArray, Arg_Need2DArray, Arg_Need3DArray, NotSupported_FixedSizeCollection, ArgumentException_OtherNotArrayOfCorrectLength, Rank_MultiDimNotSupported, InvalidOperation_IComparerFailed, ArgumentOutOfRange_EndIndexStartIndex, Arg_LowerBoundsMustMatch, Arg_BogusIComparer, Task_WaitMulti_NullTask, Task_ThrowIfDisposed, Task_Start_TaskCompleted, Task_Start_Promise, Task_Start_ContinuationTask, Task_Start_AlreadyStarted, Task_RunSynchronously_TaskCompleted, Task_RunSynchronously_Continuation, Task_RunSynchronously_Promise, Task_RunSynchronously_AlreadyStarted, Task_MultiTaskContinuation_NullTask, Task_MultiTaskContinuation_EmptyTaskList, Task_Dispose_NotCompleted, Task_Delay_InvalidMillisecondsDelay, Task_Delay_InvalidDelay, Task_ctor_LRandSR, Task_ContinueWith_NotOnAnything, Task_ContinueWith_ESandLR, TaskT_TransitionToFinal_AlreadyCompleted, TaskCompletionSourceT_TrySetException_NullException, TaskCompletionSourceT_TrySetException_NoExceptions, Memory_ThrowIfDisposed, Memory_OutstandingReferences, InvalidOperation_WrongAsyncResultOrEndCalledMultiple, ConcurrentDictionary_ConcurrencyLevelMustBePositive, ConcurrentDictionary_CapacityMustNotBeNegative, ConcurrentDictionary_TypeOfValueIncorrect, ConcurrentDictionary_TypeOfKeyIncorrect, ConcurrentDictionary_KeyAlreadyExisted, ConcurrentDictionary_ItemKeyIsNull, ConcurrentDictionary_IndexIsNegative, ConcurrentDictionary_ArrayNotLargeEnough, ConcurrentDictionary_ArrayIncorrectType, ConcurrentCollection_SyncRoot_NotSupported, ArgumentOutOfRange_Enum, InvalidOperation_HandleIsNotInitialized, AsyncMethodBuilder_InstanceNotInitialized, ArgumentNull_SafeHandle, } }
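// ---------------------------------------------------------------------------
// Usage sketch of the throw-helper pattern described in the file header above
// (hypothetical caller, not part of the BCL; it assumes it compiles in the
// same assembly so the internal ThrowHelper and enums are visible). The call
// sites stay small because the argument and resource names travel as enum
// values instead of string literals.
// ---------------------------------------------------------------------------
namespace System
{
    internal static class ThrowHelperUsageSketch
    {
        internal static T GetAt<T>(T[] items, int index)
        {
            if (items == null)
                ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);

            if ((uint)index >= (uint)items.Length)
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index,
                    ExceptionResource.ArgumentOutOfRange_Index);

            return items[index];
        }
    }
}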
/* * Unity VSCode Support * * Seamless support for Microsoft Visual Studio Code in Unity * * Version: * 2.8 * * Authors: * Matthew Davey <[email protected]> */ namespace dotBunny.Unity { using System; using System.IO; using System.Text.RegularExpressions; using UnityEditor; using UnityEngine; [InitializeOnLoad] public static class VSCode { /// <summary> /// Current Version Number /// </summary> public const float Version = 2.8f; /// <summary> /// Current Version Code /// </summary> public const string VersionCode = "-RELEASE"; /// <summary> /// Additional File Extensions /// </summary> public const string FileExtensions = ".ts, .bjs, .javascript, .json, .html, .shader, .template"; /// <summary> /// Download URL for Unity Debbuger /// </summary> public const string UnityDebuggerURL = "https://unity.gallery.vsassets.io/_apis/public/gallery/publisher/unity/extension/unity-debug/latest/assetbyname/Microsoft.VisualStudio.Services.VSIXPackage"; // Used to keep Unity from crashing when the editor is quit static bool alreadyFixedPreferences; #region Properties /// <summary> /// Path to VSCode executable public static string CodePath { get { string current = EditorPrefs.GetString("VSCode_CodePath", ""); if(current == "" || !VSCodeExists(current)) { //Value not set, set to "" or current path is invalid, try to autodetect it //If autodetect fails, a error will be printed and the default value set EditorPrefs.SetString("VSCode_CodePath", AutodetectCodePath()); //If its not installed or the install folder isn't a "normal" one, //AutodetectCodePath will print a error message to the Unity Console } return EditorPrefs.GetString("VSCode_CodePath", current); } set { EditorPrefs.SetString("VSCode_CodePath", value); } } /// <summary> /// Get Program Files Path /// </summary> /// <returns>The platforms "Program Files" path.</returns> static string ProgramFilesx86() { if( 8 == IntPtr.Size || (!String.IsNullOrEmpty(Environment.GetEnvironmentVariable("PROCESSOR_ARCHITEW6432")))) { return Environment.GetEnvironmentVariable("ProgramFiles(x86)"); } return Environment.GetEnvironmentVariable("ProgramFiles"); } /// <summary> /// Should debug information be displayed in the Unity terminal? /// </summary> public static bool Debug { get { return EditorPrefs.GetBool("VSCode_Debug", false); } set { EditorPrefs.SetBool("VSCode_Debug", value); } } /// <summary> /// Is the Visual Studio Code Integration Enabled? /// </summary> /// <remarks> /// We do not want to automatically turn it on, for in larger projects not everyone is using VSCode /// </remarks> public static bool Enabled { get { return EditorPrefs.GetBool("VSCode_Enabled", false); } set { // When turning the plugin on, we should remove all the previous project files if (!Enabled && value) { ClearProjectFiles(); } EditorPrefs.SetBool("VSCode_Enabled", value); } } public static bool UseUnityDebugger { get { return EditorPrefs.GetBool("VSCode_UseUnityDebugger", false); } set { if ( value != UseUnityDebugger ) { // Set value EditorPrefs.SetBool("VSCode_UseUnityDebugger", value); // Do not write the launch JSON file because the debugger uses its own if ( value ) { WriteLaunchFile = false; } // Update launch file UpdateLaunchFile(); } } } /// <summary> /// When opening a project in Unity, should it automatically open in VS Code. /// </summary> public static bool AutoOpenEnabled { get { return EditorPrefs.GetBool("VSCode_AutoOpenEnabled", false); } set { EditorPrefs.SetBool("VSCode_AutoOpenEnabled", value); } } /// <summary> /// Should the launch.json file be written? 
/// </summary> /// <remarks> /// Useful to disable if someone has their own custom one rigged up /// </remarks> public static bool WriteLaunchFile { get { return EditorPrefs.GetBool("VSCode_WriteLaunchFile", true); } set { EditorPrefs.SetBool("VSCode_WriteLaunchFile", value); } } /// <summary> /// Should the plugin automatically update itself. /// </summary> static bool AutomaticUpdates { get { return EditorPrefs.GetBool("VSCode_AutomaticUpdates", false); } set { EditorPrefs.SetBool("VSCode_AutomaticUpdates", value); } } static float GitHubVersion { get { return EditorPrefs.GetFloat("VSCode_GitHubVersion", Version); } set { EditorPrefs.SetFloat("VSCode_GitHubVersion", value); } } /// <summary> /// When was the last time that the plugin was updated? /// </summary> static DateTime LastUpdate { get { // Feature creation date. DateTime lastTime = new DateTime(2015, 10, 8); if (EditorPrefs.HasKey("VSCode_LastUpdate")) { DateTime.TryParse(EditorPrefs.GetString("VSCode_LastUpdate"), out lastTime); } return lastTime; } set { EditorPrefs.SetString("VSCode_LastUpdate", value.ToString()); } } /// <summary> /// Quick reference to the VSCode launch settings file /// </summary> static string LaunchPath { get { return SettingsFolder + System.IO.Path.DirectorySeparatorChar + "launch.json"; } } /// <summary> /// The full path to the project /// </summary> static string ProjectPath { get { return System.IO.Path.GetDirectoryName(UnityEngine.Application.dataPath); } } /// <summary> /// Should the script editor be reverted when quiting Unity. /// </summary> /// <remarks> /// Useful for environments where you do not use VSCode for everything. /// </remarks> static bool RevertExternalScriptEditorOnExit { get { return EditorPrefs.GetBool("VSCode_RevertScriptEditorOnExit", true); } set { EditorPrefs.SetBool("VSCode_RevertScriptEditorOnExit", value); } } /// <summary> /// Quick reference to the VSCode settings folder /// </summary> static string SettingsFolder { get { return ProjectPath + System.IO.Path.DirectorySeparatorChar + ".vscode"; } } static string SettingsPath { get { return SettingsFolder + System.IO.Path.DirectorySeparatorChar + "settings.json"; } } static int UpdateTime { get { return EditorPrefs.GetInt("VSCode_UpdateTime", 7); } set { EditorPrefs.SetInt("VSCode_UpdateTime", value); } } #endregion /// <summary> /// Integration Constructor /// </summary> static VSCode() { if (Enabled) { UpdateUnityPreferences(true); UpdateLaunchFile(); // Add Update Check DateTime targetDate = LastUpdate.AddDays(UpdateTime); if (DateTime.Now >= targetDate && AutomaticUpdates) { CheckForUpdate(); } // Open VS Code automatically when project is loaded if (AutoOpenEnabled) { CheckForAutoOpen(); } } // Event for when script is reloaded System.AppDomain.CurrentDomain.DomainUnload += System_AppDomain_CurrentDomain_DomainUnload; } static void System_AppDomain_CurrentDomain_DomainUnload(object sender, System.EventArgs e) { if (Enabled && RevertExternalScriptEditorOnExit) { UpdateUnityPreferences(false); } } #region Public Members /// <summary> /// Force Unity To Write Project File /// </summary> /// <remarks> /// Reflection! 
/// </remarks> public static void SyncSolution() { System.Type T = System.Type.GetType("UnityEditor.SyncVS,UnityEditor"); System.Reflection.MethodInfo SyncSolution = T.GetMethod("SyncSolution", System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.Static); SyncSolution.Invoke(null, null); } /// <summary> /// Update the solution files so that they work with VS Code /// </summary> public static void UpdateSolution() { // No need to process if we are not enabled if (!VSCode.Enabled) { return; } if (VSCode.Debug) { UnityEngine.Debug.Log("[VSCode] Updating Solution & Project Files"); } var currentDirectory = Directory.GetCurrentDirectory(); var solutionFiles = Directory.GetFiles(currentDirectory, "*.sln"); var projectFiles = Directory.GetFiles(currentDirectory, "*.csproj"); foreach (var filePath in solutionFiles) { string content = File.ReadAllText(filePath); content = ScrubSolutionContent(content); File.WriteAllText(filePath, content); ScrubFile(filePath); } foreach (var filePath in projectFiles) { string content = File.ReadAllText(filePath); content = ScrubProjectContent(content); File.WriteAllText(filePath, content); ScrubFile(filePath); } } #endregion #region Private Members /// <summary> /// Try to find automatically the installation of VSCode /// </summary> static string AutodetectCodePath() { string[] possiblePaths = #if UNITY_EDITOR_OSX { "/Applications/Visual Studio Code.app", "/Applications/Visual Studio Code - Insiders.app" }; #elif UNITY_EDITOR_WIN { ProgramFilesx86() + Path.DirectorySeparatorChar + "Microsoft VS Code" + Path.DirectorySeparatorChar + "bin" + Path.DirectorySeparatorChar + "code.cmd", ProgramFilesx86() + Path.DirectorySeparatorChar + "Microsoft VS Code Insiders" + Path.DirectorySeparatorChar + "bin" + Path.DirectorySeparatorChar + "code-insiders.cmd" }; #else { "/usr/bin/code", "/bin/code", "/usr/local/bin/code", "/var/lib/flatpak/exports/bin/com.visualstudio.code" }; #endif for(int i = 0; i < possiblePaths.Length; i++) { if(VSCodeExists(possiblePaths[i])) { return possiblePaths[i]; } } PrintNotFound(possiblePaths[0]); return possiblePaths[0]; //returns the default one, printing a warning message 'executable not found' } /// <summary> /// Call VSCode with arguments /// </summary> static void CallVSCode(string args) { System.Diagnostics.Process proc = new System.Diagnostics.Process(); if(!VSCodeExists(CodePath)) { PrintNotFound(CodePath); return; } #if UNITY_EDITOR_OSX proc.StartInfo.FileName = "open"; // Check the path to see if there is "Insiders" if (CodePath.Contains("Insiders")) { proc.StartInfo.Arguments = " -n -b \"com.microsoft.VSCodeInsiders\" --args " + args.Replace(@"\", @"\\"); } else { proc.StartInfo.Arguments = " -n -b \"com.microsoft.VSCode\" --args " + args.Replace(@"\", @"\\"); } proc.StartInfo.UseShellExecute = false; #elif UNITY_EDITOR_WIN proc.StartInfo.FileName = CodePath; proc.StartInfo.Arguments = args; proc.StartInfo.UseShellExecute = false; #else proc.StartInfo.FileName = CodePath; proc.StartInfo.Arguments = args.Replace(@"\", @"\\"); proc.StartInfo.UseShellExecute = false; #endif proc.StartInfo.WindowStyle = System.Diagnostics.ProcessWindowStyle.Hidden; proc.StartInfo.CreateNoWindow = true; proc.StartInfo.RedirectStandardOutput = true; proc.Start(); } /// <summary> /// Check for Updates with GitHub /// </summary> static void CheckForUpdate() { var fileContent = string.Empty; EditorUtility.DisplayProgressBar("VSCode", "Checking for updates ...", 0.5f); // Because were not a runtime framework, lets just use the simplest way of 
doing this try { using (var webClient = new System.Net.WebClient()) { fileContent = webClient.DownloadString("https://raw.githubusercontent.com/dotBunny/VSCode/master/Plugins/Editor/VSCode.cs"); } } catch (Exception e) { if (Debug) { UnityEngine.Debug.Log("[VSCode] " + e.Message); } // Don't go any further if there is an error return; } finally { EditorUtility.ClearProgressBar(); } // Set the last update time LastUpdate = DateTime.Now; // Fix for oddity in downlo if (fileContent.Substring(0, 2) != "/*") { int startPosition = fileContent.IndexOf("/*", StringComparison.CurrentCultureIgnoreCase); // Jump over junk characters fileContent = fileContent.Substring(startPosition); } string[] fileExploded = fileContent.Split('\n'); if (fileExploded.Length > 7) { float github = Version; if (float.TryParse(fileExploded[6].Replace("*", "").Trim(), out github)) { GitHubVersion = github; } if (github > Version) { var GUIDs = AssetDatabase.FindAssets("t:Script VSCode"); var path = Application.dataPath.Substring(0, Application.dataPath.Length - "/Assets".Length) + System.IO.Path.DirectorySeparatorChar + AssetDatabase.GUIDToAssetPath(GUIDs[0]).Replace('/', System.IO.Path.DirectorySeparatorChar); if (EditorUtility.DisplayDialog("VSCode Update", "A newer version of the VSCode plugin is available, would you like to update your version?", "Yes", "No")) { // Always make sure the file is writable System.IO.FileInfo fileInfo = new System.IO.FileInfo(path); fileInfo.IsReadOnly = false; // Write update file File.WriteAllText(path, fileContent); // Force update on text file AssetDatabase.ImportAsset(AssetDatabase.GUIDToAssetPath(GUIDs[0]), ImportAssetOptions.ForceUpdate); } } } } /// <summary> /// Checks whether it should auto-open VSCode /// </summary> /// <remarks> /// VSCode() gets called on Launch and Run, through IntializeOnLoad /// https://docs.unity3d.com/ScriptReference/InitializeOnLoadAttribute.html /// To make sure it only opens VSCode when Unity (re)launches (i.e. opens a project), /// we compare the launch time, which we calculate using EditorApplication.timeSinceStartup. 
/// </remarks> static void CheckForAutoOpen() { double timeInSeconds = (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds; int unityLaunchTimeInSeconds = (int)(timeInSeconds - EditorApplication.timeSinceStartup); int prevUnityLaunchTime = EditorPrefs.GetInt("VSCode_UnityLaunchTime", 0); // If launch time has changed, then Unity was re-opened if (unityLaunchTimeInSeconds > prevUnityLaunchTime) { // Launch VSCode VSCode.MenuOpenProject(); // Save new launch time EditorPrefs.SetInt("VSCode_UnityLaunchTime", unityLaunchTimeInSeconds); } } /// <summary> /// Clear out any existing project files and lingering stuff that might cause problems /// </summary> static void ClearProjectFiles() { var currentDirectory = Directory.GetCurrentDirectory(); var solutionFiles = Directory.GetFiles(currentDirectory, "*.sln"); var projectFiles = Directory.GetFiles(currentDirectory, "*.csproj"); var unityProjectFiles = Directory.GetFiles(currentDirectory, "*.unityproj"); foreach (string solutionFile in solutionFiles) { File.Delete(solutionFile); } foreach (string projectFile in projectFiles) { File.Delete(projectFile); } foreach (string unityProjectFile in unityProjectFiles) { File.Delete(unityProjectFile); } // Replace with our clean files (only in Unity 5) #if !UNITY_4_0 && !UNITY_4_1 && !UNITY_4_2 && !UNITY_4_3 && !UNITY_4_5 && !UNITY_4_6 && !UNITY_4_7 SyncSolution(); #endif } /// <summary> /// Force Unity Preferences Window To Read From Settings /// </summary> static void FixUnityPreferences() { // I want that window, please and thank you System.Type T = System.Type.GetType("UnityEditor.PreferencesWindow,UnityEditor"); if (EditorWindow.focusedWindow == null) return; // Only run this when the editor window is visible (cause its what screwed us up) if (EditorWindow.focusedWindow.GetType() == T) { var window = EditorWindow.GetWindow(T, true, "Unity Preferences"); if (window == null) { if (Debug) { UnityEngine.Debug.Log("[VSCode] No Preferences Window Found (really?)"); } return; } var invokerType = window.GetType(); var invokerMethod = invokerType.GetMethod("ReadPreferences", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance); if (invokerMethod != null) { invokerMethod.Invoke(window, null); } else if (Debug) { UnityEngine.Debug.Log("[VSCode] No Reflection Method Found For Preferences"); } } } /// <summary> /// Determine what port Unity is listening for on Windows /// </summary> static int GetDebugPort() { #if UNITY_EDITOR_WIN System.Diagnostics.Process process = new System.Diagnostics.Process(); process.StartInfo.FileName = "netstat"; process.StartInfo.Arguments = "-a -n -o -p TCP"; process.StartInfo.UseShellExecute = false; process.StartInfo.RedirectStandardOutput = true; process.Start(); string output = process.StandardOutput.ReadToEnd(); string[] lines = output.Split('\n'); process.WaitForExit(); foreach (string line in lines) { string[] tokens = Regex.Split(line, "\\s+"); if (tokens.Length > 4) { int test = -1; int.TryParse(tokens[5], out test); if (test > 1023) { try { var p = System.Diagnostics.Process.GetProcessById(test); if (p.ProcessName == "Unity") { return test; } } catch { } } } } #else System.Diagnostics.Process process = new System.Diagnostics.Process(); process.StartInfo.FileName = "lsof"; process.StartInfo.Arguments = "-c /^Unity$/ -i 4tcp -a"; process.StartInfo.UseShellExecute = false; process.StartInfo.RedirectStandardOutput = true; process.Start(); // Not thread safe (yet!) 
string output = process.StandardOutput.ReadToEnd(); string[] lines = output.Split('\n'); process.WaitForExit(); foreach (string line in lines) { int port = -1; if (line.StartsWith("Unity")) { string[] portions = line.Split(new string[] { "TCP *:" }, System.StringSplitOptions.None); if (portions.Length >= 2) { Regex digitsOnly = new Regex(@"[^\d]"); string cleanPort = digitsOnly.Replace(portions[1], ""); if (int.TryParse(cleanPort, out port)) { if (port > -1) { return port; } } } } } #endif return -1; } /// <summary> /// Manually install the original Unity Debuger /// </summary> /// <remarks> /// This should auto update to the latest. /// </remarks> static void InstallUnityDebugger() { EditorUtility.DisplayProgressBar("VSCode", "Downloading Unity Debugger ...", 0.1f); byte[] fileContent; try { using (var webClient = new System.Net.WebClient()) { fileContent = webClient.DownloadData(UnityDebuggerURL); } } catch (Exception e) { if (Debug) { UnityEngine.Debug.Log("[VSCode] " + e.Message); } // Don't go any further if there is an error return; } finally { EditorUtility.ClearProgressBar(); } // Do we have a file to install? if ( fileContent != null ) { string fileName = System.IO.Path.GetTempPath() + Guid.NewGuid().ToString() + ".vsix"; File.WriteAllBytes(fileName, fileContent); CallVSCode(fileName); } } // HACK: This is in until Unity can figure out why MD keeps opening even though a different program is selected. [MenuItem("Assets/Open C# Project In Code", false, 1000)] static void MenuOpenProject() { // Force the project files to be sync SyncSolution(); // Load Project CallVSCode("\"" + ProjectPath + "\""); } /// <summary> /// Print a error message to the Unity Console about not finding the code executable /// </summary> static void PrintNotFound(string path) { UnityEngine.Debug.LogError("[VSCode] Code executable in '" + path + "' not found. Check your" + "Visual Studio Code installation and insert the correct path in the Preferences menu."); } [MenuItem("Assets/Open C# Project In Code", true, 1000)] static bool ValidateMenuOpenProject() { return Enabled; } /// <summary> /// VS Code Integration Preferences Item /// </summary> /// <remarks> /// Contains all 3 toggles: Enable/Disable; Debug On/Off; Writing Launch File On/Off /// </remarks> [PreferenceItem("VSCode")] static void VSCodePreferencesItem() { if (EditorApplication.isCompiling) { EditorGUILayout.HelpBox("Please wait for Unity to finish compiling. 
\nIf the window doesn't refresh, simply click on the window or move it around to cause a repaint to happen.", MessageType.Warning); return; } EditorGUILayout.BeginVertical(); var developmentInfo = "Support development of this plugin, follow @reapazor and @dotbunny on Twitter."; var versionInfo = string.Format("{0:0.00}", Version) + VersionCode + ", GitHub version @ " + string.Format("{0:0.00}", GitHubVersion); EditorGUILayout.HelpBox(developmentInfo + " --- [ " + versionInfo + " ]", MessageType.None); EditorGUI.BeginChangeCheck(); // Need the VS Code executable EditorGUILayout.BeginHorizontal(); EditorGUILayout.LabelField("VS Code Path", GUILayout.Width(75)); #if UNITY_5_3_OR_NEWER CodePath = EditorGUILayout.DelayedTextField(CodePath, GUILayout.ExpandWidth(true)); #else CodePath = EditorGUILayout.TextField(CodePath, GUILayout.ExpandWidth(true)); #endif GUI.SetNextControlName("PathSetButton"); if(GUILayout.Button("...", GUILayout.Height(14), GUILayout.Width(20))) { GUI.FocusControl("PathSetButton"); string path = EditorUtility.OpenFilePanel( "Visual Studio Code Executable", "", "" ); if( path.Length != 0 && File.Exists(path) || Directory.Exists(path)) { CodePath = path; } } EditorGUILayout.EndHorizontal(); EditorGUILayout.Space(); Enabled = EditorGUILayout.Toggle(new GUIContent("Enable Integration", "Should the integration work its magic for you?"), Enabled); UseUnityDebugger = EditorGUILayout.Toggle(new GUIContent("Use Unity Debugger", "Should the integration integrate with Unity's VSCode Extension (must be installed)."), UseUnityDebugger); AutoOpenEnabled = EditorGUILayout.Toggle(new GUIContent("Enable Auto Open", "When opening a project in Unity, should it automatically open in VS Code?"), AutoOpenEnabled); EditorGUILayout.Space(); RevertExternalScriptEditorOnExit = EditorGUILayout.Toggle(new GUIContent("Revert Script Editor On Unload", "Should the external script editor setting be reverted to its previous setting on project unload? 
This is useful if you do not use Code with all your projects."),RevertExternalScriptEditorOnExit); Debug = EditorGUILayout.Toggle(new GUIContent("Output Messages To Console", "Should informational messages be sent to Unity's Console?"), Debug); WriteLaunchFile = EditorGUILayout.Toggle(new GUIContent("Always Write Launch File", "Always write the launch.json settings when entering play mode?"), WriteLaunchFile); EditorGUILayout.Space(); AutomaticUpdates = EditorGUILayout.Toggle(new GUIContent("Automatic Updates", "Should the plugin automatically update itself?"), AutomaticUpdates); UpdateTime = EditorGUILayout.IntSlider(new GUIContent("Update Timer (Days)", "After how many days should updates be checked for?"), UpdateTime, 1, 31); EditorGUILayout.Space(); EditorGUILayout.Space(); if (EditorGUI.EndChangeCheck()) { UpdateUnityPreferences(Enabled); // TODO: Force Unity To Reload Preferences // This seems to be a hick up / issue if (VSCode.Debug) { if (Enabled) { UnityEngine.Debug.Log("[VSCode] Integration Enabled"); } else { UnityEngine.Debug.Log("[VSCode] Integration Disabled"); } } } if (GUILayout.Button(new GUIContent("Force Update", "Check for updates to the plugin, right NOW!"))) { CheckForUpdate(); EditorGUILayout.EndVertical(); return; } if (GUILayout.Button(new GUIContent("Write Workspace Settings", "Output a default set of workspace settings for VSCode to use, ignoring many different types of files."))) { WriteWorkspaceSettings(); EditorGUILayout.EndVertical(); return; } EditorGUILayout.Space(); if (UseUnityDebugger) { EditorGUILayout.HelpBox("In order for the \"Use Unity Debuggger\" option to function above, you need to have installed the Unity Debugger Extension for Visual Studio Code.", MessageType.Warning); if (GUILayout.Button(new GUIContent("Install Unity Debugger", "Install the Unity Debugger Extension into Code"))) { InstallUnityDebugger(); EditorGUILayout.EndVertical(); return; } } } /// <summary> /// Asset Open Callback (from Unity) /// </summary> /// <remarks> /// Called when Unity is about to open an asset. /// </remarks> [UnityEditor.Callbacks.OnOpenAssetAttribute()] static bool OnOpenedAsset(int instanceID, int line) { // bail out if we are not using VSCode if (!Enabled) { return false; } // current path without the asset folder string appPath = ProjectPath; // determine asset that has been double clicked in the project view UnityEngine.Object selected = EditorUtility.InstanceIDToObject(instanceID); // additional file extensions string selectedFilePath = AssetDatabase.GetAssetPath(selected); string selectedFileExt = Path.GetExtension(selectedFilePath); if (selectedFileExt == null) { selectedFileExt = String.Empty; } if (!String.IsNullOrEmpty(selectedFileExt)) { selectedFileExt = selectedFileExt.ToLower(); } // open supported object types if (selected.GetType().ToString() == "UnityEditor.MonoScript" || selected.GetType().ToString() == "UnityEngine.Shader" || VSCode.FileExtensions.IndexOf(selectedFileExt, StringComparison.OrdinalIgnoreCase) >= 0) { string completeFilepath = appPath + Path.DirectorySeparatorChar + AssetDatabase.GetAssetPath(selected); string args = null; if (line == -1) { args = "\"" + ProjectPath + "\" \"" + completeFilepath + "\" -r"; } else { args = "\"" + ProjectPath + "\" -g \"" + completeFilepath + ":" + line.ToString() + "\" -r"; } // call 'open' CallVSCode(args); return true; } // Didnt find a code file? 
let Unity figure it out return false; } /// <summary> /// Executed when the Editor's playmode changes allowing for capture of required data /// </summary> #if UNITY_2017_2_OR_NEWER static void OnPlaymodeStateChanged(UnityEditor.PlayModeStateChange state) #else static void OnPlaymodeStateChanged() #endif { if (UnityEngine.Application.isPlaying && EditorApplication.isPlayingOrWillChangePlaymode) { UpdateLaunchFile(); } } /// <summary> /// Detect when scripts are reloaded and relink playmode detection /// </summary> [UnityEditor.Callbacks.DidReloadScripts()] static void OnScriptReload() { #if UNITY_2017_2_OR_NEWER EditorApplication.playModeStateChanged -= OnPlaymodeStateChanged; EditorApplication.playModeStateChanged += OnPlaymodeStateChanged; #else EditorApplication.playmodeStateChanged -= OnPlaymodeStateChanged; EditorApplication.playmodeStateChanged += OnPlaymodeStateChanged; #endif } /// <summary> /// Remove extra/erroneous lines from a file. static void ScrubFile(string path) { string[] lines = File.ReadAllLines(path); System.Collections.Generic.List<string> newLines = new System.Collections.Generic.List<string>(); for (int i = 0; i < lines.Length; i++) { // Check Empty if (string.IsNullOrEmpty(lines[i].Trim()) || lines[i].Trim() == "\t" || lines[i].Trim() == "\t\t") { } else { newLines.Add(lines[i]); } } File.WriteAllLines(path, newLines.ToArray()); } /// <summary> /// Remove extra/erroneous data from project file (content). /// </summary> static string ScrubProjectContent(string content) { if (content.Length == 0) return ""; #if !UNITY_EDITOR_WIN // Moved to 3.5, 2.0 is legacy. if (content.IndexOf("<TargetFrameworkVersion>v3.5</TargetFrameworkVersion>") != -1) { content = Regex.Replace(content, "<TargetFrameworkVersion>v3.5</TargetFrameworkVersion>", "<TargetFrameworkVersion>v2.0</TargetFrameworkVersion>"); } #endif string targetPath = "";// "<TargetPath>Temp" + Path.DirectorySeparatorChar + "bin" + Path.DirectorySeparatorChar + "Debug" + Path.DirectorySeparatorChar + "</TargetPath>"; //OutputPath string langVersion = "<LangVersion>default</LangVersion>"; bool found = true; int location = 0; string addedOptions = ""; int startLocation = -1; int endLocation = -1; int endLength = 0; while (found) { startLocation = -1; endLocation = -1; endLength = 0; addedOptions = ""; startLocation = content.IndexOf("<PropertyGroup", location); if (startLocation != -1) { endLocation = content.IndexOf("</PropertyGroup>", startLocation); endLength = (endLocation - startLocation); if (endLocation == -1) { found = false; continue; } else { found = true; location = endLocation; } if (content.Substring(startLocation, endLength).IndexOf("<TargetPath>") == -1) { addedOptions += "\n\r\t" + targetPath + "\n\r"; } if (content.Substring(startLocation, endLength).IndexOf("<LangVersion>") == -1) { addedOptions += "\n\r\t" + langVersion + "\n\r"; } if (!string.IsNullOrEmpty(addedOptions)) { content = content.Substring(0, endLocation) + addedOptions + content.Substring(endLocation); } } else { found = false; } } return content; } /// <summary> /// Remove extra/erroneous data from solution file (content). 
/// </summary> static string ScrubSolutionContent(string content) { // Replace Solution Version content = content.Replace( "Microsoft Visual Studio Solution File, Format Version 11.00\r\n# Visual Studio 2008\r\n", "\r\nMicrosoft Visual Studio Solution File, Format Version 12.00\r\n# Visual Studio 2012"); // Remove Solution Properties (Unity Junk) int startIndex = content.IndexOf("GlobalSection(SolutionProperties) = preSolution"); if (startIndex != -1) { int endIndex = content.IndexOf("EndGlobalSection", startIndex); content = content.Substring(0, startIndex) + content.Substring(endIndex + 16); } return content; } /// <summary> /// Update Visual Studio Code Launch file /// </summary> static void UpdateLaunchFile() { if (!VSCode.Enabled) { return; } else if (VSCode.UseUnityDebugger) { if (!Directory.Exists(VSCode.SettingsFolder)) System.IO.Directory.CreateDirectory(VSCode.SettingsFolder); // Write out proper formatted JSON (hence no more SimpleJSON here) string fileContent = "{\n\t\"version\": \"0.2.0\",\n\t\"configurations\": [\n\t\t{\n\t\t\t\"name\": \"Unity Editor\",\n\t\t\t\"type\": \"unity\",\n\t\t\t\"request\": \"launch\"\n\t\t},\n\t\t{\n\t\t\t\"name\": \"Windows Player\",\n\t\t\t\"type\": \"unity\",\n\t\t\t\"request\": \"launch\"\n\t\t},\n\t\t{\n\t\t\t\"name\": \"OSX Player\",\n\t\t\t\"type\": \"unity\",\n\t\t\t\"request\": \"launch\"\n\t\t},\n\t\t{\n\t\t\t\"name\": \"Linux Player\",\n\t\t\t\"type\": \"unity\",\n\t\t\t\"request\": \"launch\"\n\t\t},\n\t\t{\n\t\t\t\"name\": \"iOS Player\",\n\t\t\t\"type\": \"unity\",\n\t\t\t\"request\": \"launch\"\n\t\t},\n\t\t{\n\t\t\t\"name\": \"Android Player\",\n\t\t\t\"type\": \"unity\",\n\t\t\t\"request\": \"launch\"\n\n\t\t}\n\t]\n}"; File.WriteAllText(VSCode.LaunchPath, fileContent); } else if (VSCode.WriteLaunchFile) { int port = GetDebugPort(); if (port > -1) { if (!Directory.Exists(VSCode.SettingsFolder)) System.IO.Directory.CreateDirectory(VSCode.SettingsFolder); // Write out proper formatted JSON (hence no more SimpleJSON here) string fileContent = "{\n\t\"version\":\"0.2.0\",\n\t\"configurations\":[ \n\t\t{\n\t\t\t\"name\":\"Unity\",\n\t\t\t\"type\":\"mono\",\n\t\t\t\"request\":\"attach\",\n\t\t\t\"address\":\"localhost\",\n\t\t\t\"port\":" + port + "\n\t\t}\n\t]\n}"; File.WriteAllText(VSCode.LaunchPath, fileContent); if (VSCode.Debug) { UnityEngine.Debug.Log("[VSCode] Debug Port Found (" + port + ")"); } } else { if (VSCode.Debug) { UnityEngine.Debug.LogWarning("[VSCode] Unable to determine debug port."); } } } } /// <summary> /// Update Unity Editor Preferences /// </summary> /// <param name="enabled">Should we turn on this party!</param> static void UpdateUnityPreferences(bool enabled) { if (enabled) { // App if (EditorPrefs.GetString("kScriptsDefaultApp") != CodePath) { EditorPrefs.SetString("VSCode_PreviousApp", EditorPrefs.GetString("kScriptsDefaultApp")); } EditorPrefs.SetString("kScriptsDefaultApp", CodePath); // Arguments if (EditorPrefs.GetString("kScriptEditorArgs") != "-r -g `$(File):$(Line)`") { EditorPrefs.SetString("VSCode_PreviousArgs", EditorPrefs.GetString("kScriptEditorArgs")); } EditorPrefs.SetString("kScriptEditorArgs", "-r -g `$(File):$(Line)`"); EditorPrefs.SetString("kScriptEditorArgs" + CodePath, "-r -g `$(File):$(Line)`"); // MonoDevelop Solution if (EditorPrefs.GetBool("kMonoDevelopSolutionProperties", false)) { EditorPrefs.SetBool("VSCode_PreviousMD", true); } EditorPrefs.SetBool("kMonoDevelopSolutionProperties", false); // Support Unity Proj (JS) if (EditorPrefs.GetBool("kExternalEditorSupportsUnityProj", 
false)) { EditorPrefs.SetBool("VSCode_PreviousUnityProj", true); } EditorPrefs.SetBool("kExternalEditorSupportsUnityProj", false); if (!EditorPrefs.GetBool("AllowAttachedDebuggingOfEditor", false)) { EditorPrefs.SetBool("VSCode_PreviousAttach", false); } EditorPrefs.SetBool("AllowAttachedDebuggingOfEditor", true); } else { // Restore previous app if (!string.IsNullOrEmpty(EditorPrefs.GetString("VSCode_PreviousApp"))) { EditorPrefs.SetString("kScriptsDefaultApp", EditorPrefs.GetString("VSCode_PreviousApp")); } // Restore previous args if (!string.IsNullOrEmpty(EditorPrefs.GetString("VSCode_PreviousArgs"))) { EditorPrefs.SetString("kScriptEditorArgs", EditorPrefs.GetString("VSCode_PreviousArgs")); } // Restore MD setting if (EditorPrefs.GetBool("VSCode_PreviousMD", false)) { EditorPrefs.SetBool("kMonoDevelopSolutionProperties", true); } // Restore MD setting if (EditorPrefs.GetBool("VSCode_PreviousUnityProj", false)) { EditorPrefs.SetBool("kExternalEditorSupportsUnityProj", true); } // Always leave editor attaching on, I know, it solves the problem of needing to restart for this // to actually work EditorPrefs.SetBool("AllowAttachedDebuggingOfEditor", true); } if (!alreadyFixedPreferences) { alreadyFixedPreferences = true; FixUnityPreferences(); } } /// <summary> /// Determines if the current path to the code executable is valid or not (exists) /// </summary> static bool VSCodeExists(string curPath) { #if UNITY_EDITOR_OSX return System.IO.Directory.Exists(curPath); #else System.IO.FileInfo code = new System.IO.FileInfo(curPath); return code.Exists; #endif } /// <summary> /// Write Default Workspace Settings /// </summary> static void WriteWorkspaceSettings() { if (Debug) { UnityEngine.Debug.Log("[VSCode] Workspace Settings Written"); } if (!Directory.Exists(VSCode.SettingsFolder)) { System.IO.Directory.CreateDirectory(VSCode.SettingsFolder); } string exclusions = // Associations "{\n" + "\t\"files.associations\":\n" + "\t{\n" + "\t\t\"*.bjs\":\"javascript\",\n" + "\t\t\"*.javascript\":\"javascript\"\n" + "\t},\n" + "\t\"files.exclude\":\n" + "\t{\n" + // Hidden Files "\t\t\"**/.DS_Store\":true,\n" + "\t\t\"**/.git\":true,\n" + "\t\t\"**/.gitignore\":true,\n" + "\t\t\"**/.gitattributes\":true,\n" + "\t\t\"**/.gitmodules\":true,\n" + "\t\t\"**/.svn\":true,\n" + // Compressed Files "\t\t\"**/*.zip\":true,\n" + "\t\t\"**/*.gz\":true,\n" + "\t\t\"**/*.7z\":true,\n" + // Project Files "\t\t\"**/*.booproj\":true,\n" + "\t\t\"**/*.pidb\":true,\n" + "\t\t\"**/*.suo\":true,\n" + "\t\t\"**/*.user\":true,\n" + "\t\t\"**/*.userprefs\":true,\n" + "\t\t\"**/*.unityproj\":true,\n" + "\t\t\"**/*.dll\":true,\n" + "\t\t\"**/*.exe\":true,\n" + // Media Files "\t\t\"**/*.pdf\":true,\n" + // Video "\t\t\"**/*.mp4\":true,\n" + // Audio "\t\t\"**/*.mid\":true,\n" + "\t\t\"**/*.midi\":true,\n" + "\t\t\"**/*.wav\":true,\n" + "\t\t\"**/*.mp3\":true,\n" + "\t\t\"**/*.ogg\":true,\n" + // Textures "\t\t\"**/*.gif\":true,\n" + "\t\t\"**/*.ico\":true,\n" + "\t\t\"**/*.jpg\":true,\n" + "\t\t\"**/*.jpeg\":true,\n" + "\t\t\"**/*.png\":true,\n" + "\t\t\"**/*.psd\":true,\n" + "\t\t\"**/*.tga\":true,\n" + "\t\t\"**/*.tif\":true,\n" + "\t\t\"**/*.tiff\":true,\n" + "\t\t\"**/*.hdr\":true,\n" + "\t\t\"**/*.exr\":true,\n" + // Models "\t\t\"**/*.3ds\":true,\n" + "\t\t\"**/*.3DS\":true,\n" + "\t\t\"**/*.fbx\":true,\n" + "\t\t\"**/*.FBX\":true,\n" + "\t\t\"**/*.lxo\":true,\n" + "\t\t\"**/*.LXO\":true,\n" + "\t\t\"**/*.ma\":true,\n" + "\t\t\"**/*.MA\":true,\n" + "\t\t\"**/*.obj\":true,\n" + "\t\t\"**/*.OBJ\":true,\n" + // Unity File 
Types "\t\t\"**/*.asset\":true,\n" + "\t\t\"**/*.cubemap\":true,\n" + "\t\t\"**/*.flare\":true,\n" + "\t\t\"**/*.mat\":true,\n" + "\t\t\"**/*.meta\":true,\n" + "\t\t\"**/*.prefab\":true,\n" + "\t\t\"**/*.unity\":true,\n" + "\t\t\"**/*.anim\":true,\n" + "\t\t\"**/*.controller\":true,\n" + // Folders "\t\t\"build/\":true,\n" + "\t\t\"Build/\":true,\n" + "\t\t\"Library/\":true,\n" + "\t\t\"library/\":true,\n" + "\t\t\"obj/\":true,\n" + "\t\t\"Obj/\":true,\n" + "\t\t\"ProjectSettings/\":true,\n" + "\t\t\"temp/\":true,\n" + "\t\t\"Temp/\":true\n" + "\t}\n" + "}"; // Don't like the replace but it fixes the issue with the JSON File.WriteAllText(VSCode.SettingsPath, exclusions); } #endregion } /// <summary> /// VSCode Asset AssetPostprocessor /// <para>This ensures that whenever the project files are generated, the VSCode versions are created as well</para> /// </summary> /// <remarks>Undocumented Event</remarks> public class VSCodeAssetPostprocessor : AssetPostprocessor { /// <summary> /// Undocumented project generation event callback /// </summary> private static void OnGeneratedCSProjectFiles() { // Force execution of VSCode update VSCode.UpdateSolution(); } } }
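// Illustrative note (not part of the plugin file above): the escaped string that
// UpdateLaunchFile writes for the Mono attach case expands to roughly the
// following .vscode/launch.json. The port value is whatever GetDebugPort()
// returns; 56000 below is only a placeholder.
//
//  {
//      "version":"0.2.0",
//      "configurations":[
//          {
//              "name":"Unity",
//              "type":"mono",
//              "request":"attach",
//              "address":"localhost",
//              "port":56000
//          }
//      ]
//  }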
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // File System.Collections.Concurrent.BlockingCollection_1.cs // Automatically generated contract file. using System.Collections.Generic; using System.IO; using System.Text; using System.Diagnostics.Contracts; using System; // Disable the "this variable is not used" warning as every field would imply it. #pragma warning disable 0414 // Disable the "this variable is never assigned to". #pragma warning disable 0067 // Disable the "this event is never assigned to". #pragma warning disable 0649 // Disable the "this variable is never used". #pragma warning disable 0169 // Disable the "new keyword not required" warning. #pragma warning disable 0109 // Disable the "extern without DllImport" warning. #pragma warning disable 0626 // Disable the "could hide other member" warning, can happen on certain properties. 
#pragma warning disable 0108 namespace System.Collections.Concurrent { public partial class BlockingCollection<T> : IEnumerable<T>, System.Collections.ICollection, System.Collections.IEnumerable, IDisposable { #region Methods and constructors public void Add(T item, System.Threading.CancellationToken cancellationToken) { } public void Add(T item) { } public static int AddToAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, T item) { Contract.Ensures(-1 <= Contract.Result<int>()); return default(int); } public static int AddToAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, T item, System.Threading.CancellationToken cancellationToken) { Contract.Ensures(-1 <= Contract.Result<int>()); return default(int); } public BlockingCollection() { } public BlockingCollection(int boundedCapacity) { } public BlockingCollection(IProducerConsumerCollection<T> collection, int boundedCapacity) { Contract.Ensures((Contract.OldValue(collection.Count) - boundedCapacity) <= 0); } public BlockingCollection(IProducerConsumerCollection<T> collection) { } public void CompleteAdding() { } public void CopyTo(T[] array, int index) { } protected virtual new void Dispose(bool disposing) { } public void Dispose() { } public IEnumerable<T> GetConsumingEnumerable(System.Threading.CancellationToken cancellationToken) { Contract.Ensures(Contract.Result<System.Collections.Generic.IEnumerable<T>>() != null); return default(IEnumerable<T>); } public IEnumerable<T> GetConsumingEnumerable() { Contract.Ensures(Contract.Result<System.Collections.Generic.IEnumerable<T>>() != null); return default(IEnumerable<T>); } IEnumerator<T> System.Collections.Generic.IEnumerable<T>.GetEnumerator() { return default(IEnumerator<T>); } void System.Collections.ICollection.CopyTo(Array array, int index) { } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return default(System.Collections.IEnumerator); } public T Take() { return default(T); } public T Take(System.Threading.CancellationToken cancellationToken) { return default(T); } public static int TakeFromAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, out T item) { Contract.Ensures(-1 <= Contract.Result<int>()); item = default(T); return default(int); } public static int TakeFromAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, out T item, System.Threading.CancellationToken cancellationToken) { Contract.Ensures(-1 <= Contract.Result<int>()); item = default(T); return default(int); } public T[] ToArray() { return default(T[]); } public bool TryAdd(T item) { return default(bool); } public bool TryAdd(T item, int millisecondsTimeout) { return default(bool); } public bool TryAdd(T item, int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { return default(bool); } public bool TryAdd(T item, TimeSpan timeout) { return default(bool); } public static int TryAddToAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, T item, int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { Contract.Ensures(-1 <= Contract.Result<int>()); return default(int); } public static int TryAddToAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, T item, TimeSpan timeout) { Contract.Ensures(-1 <= Contract.Result<int>()); return default(int); } public static int TryAddToAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, T item) { Contract.Ensures(-1 <= Contract.Result<int>()); return default(int); } public 
static int TryAddToAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, T item, int millisecondsTimeout) { Contract.Ensures(-1 <= Contract.Result<int>()); return default(int); } public bool TryTake(out T item, int millisecondsTimeout) { item = default(T); return default(bool); } public bool TryTake(out T item, int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { item = default(T); return default(bool); } public bool TryTake(out T item, TimeSpan timeout) { item = default(T); return default(bool); } public bool TryTake(out T item) { item = default(T); return default(bool); } public static int TryTakeFromAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, out T item) { Contract.Ensures(-1 <= Contract.Result<int>()); item = default(T); return default(int); } public static int TryTakeFromAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, out T item, int millisecondsTimeout) { Contract.Ensures(-1 <= Contract.Result<int>()); item = default(T); return default(int); } public static int TryTakeFromAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, out T item, int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { Contract.Ensures(-1 <= Contract.Result<int>()); item = default(T); return default(int); } public static int TryTakeFromAny(System.Collections.Concurrent.BlockingCollection<T>[] collections, out T item, TimeSpan timeout) { Contract.Ensures(-1 <= Contract.Result<int>()); item = default(T); return default(int); } #endregion #region Properties and indexers public int BoundedCapacity { get { return default(int); } } public int Count { get { return default(int); } } public bool IsAddingCompleted { get { return default(bool); } } public bool IsCompleted { get { return default(bool); } } bool System.Collections.ICollection.IsSynchronized { get { return default(bool); } } Object System.Collections.ICollection.SyncRoot { get { return default(Object); } } #endregion } }
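// Illustrative usage sketch (not part of the generated contract file above):
// a minimal bounded producer/consumer pair exercising the members whose
// contracts are declared here (Add, CompleteAdding, GetConsumingEnumerable).
using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

internal static class BlockingCollectionUsageSketch
{
    internal static void Run()
    {
        // Bounded to 100 items, so Add blocks whenever the consumer falls behind.
        using (var queue = new BlockingCollection<int>(boundedCapacity: 100))
        {
            Task producer = Task.Run(() =>
            {
                for (int i = 0; i < 1000; i++)
                {
                    queue.Add(i);
                }

                // Tells consumers that no further items will be added.
                queue.CompleteAdding();
            });

            // Blocks while the collection is empty and completes once
            // CompleteAdding has been called and the remaining items are drained.
            foreach (int item in queue.GetConsumingEnumerable())
            {
                Console.WriteLine(item);
            }

            producer.Wait();
        }
    }
}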
using Lucene.Net.Index; using Lucene.Net.Store; using Lucene.Net.Util; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text.RegularExpressions; using Directory = Lucene.Net.Store.Directory; namespace Lucene.Net.Replicator { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// A <see cref="IReplicationHandler"/> for replication of an index. Implements /// <see cref="RevisionReady"/> by copying the files pointed by the client resolver to /// the index <see cref="Store.Directory"/> and then touches the index with /// <see cref="IndexWriter"/> to make sure any unused files are deleted. /// </summary> /// <remarks> /// <b>NOTE:</b> This handler assumes that <see cref="IndexWriter"/> is not opened by /// another process on the index directory. In fact, opening an /// <see cref="IndexWriter"/> on the same directory to which files are copied can lead /// to undefined behavior, where some or all the files will be deleted, override /// other files or simply create a mess. When you replicate an index, it is best /// if the index is never modified by <see cref="IndexWriter"/>, except the one that is /// open on the source index, from which you replicate. /// <para/> /// This handler notifies the application via a provided <see cref="T:Func{bool?}"/> when an /// updated index commit was made available for it. /// <para/> /// @lucene.experimental /// </remarks> public class IndexReplicationHandler : IReplicationHandler { /// <summary> /// The component used to log messages to the <see cref="Util.InfoStream.Default"/> /// <see cref="Util.InfoStream"/>. /// </summary> public const string INFO_STREAM_COMPONENT = "IndexReplicationHandler"; private readonly Directory indexDirectory; private readonly Func<bool?> callback; private volatile IDictionary<string, IList<RevisionFile>> currentRevisionFiles; private volatile string currentVersion; private volatile InfoStream infoStream; //Note: LUCENENET Specific Utility Method private void WriteToInfoStream(params string[] messages) { if (!InfoStream.IsEnabled(INFO_STREAM_COMPONENT)) return; foreach (string message in messages) InfoStream.Message(INFO_STREAM_COMPONENT, message); } /// <summary> /// Returns the last <see cref="IndexCommit"/> found in the <see cref="Directory"/>, or /// <c>null</c> if there are no commits. /// </summary> /// <exception cref="IOException"></exception> public static IndexCommit GetLastCommit(Directory directory) { try { // IndexNotFoundException which we handle below return DirectoryReader.IndexExists(directory) ? DirectoryReader.ListCommits(directory).Last() : null; } catch (IndexNotFoundException) { // ignore the exception and return null } return null; } /// <summary> /// Verifies that the last file is segments_N and fails otherwise. 
It also /// removes and returns the file from the list, because it needs to be handled /// last, after all files. This is important in order to guarantee that if a /// reader sees the new segments_N, all other segment files are already on /// stable storage. /// <para/> /// The reason why the code fails instead of putting segments_N file last is /// that this indicates an error in the <see cref="IRevision"/> implementation. /// </summary> public static string GetSegmentsFile(IList<string> files, bool allowEmpty) { if (!files.Any()) { if (allowEmpty) return null; throw new InvalidOperationException("empty list of files not allowed"); } string segmentsFile = files.Last(); //NOTE: Relying on side-effects outside? files.RemoveAt(files.Count - 1); if (!segmentsFile.StartsWith(IndexFileNames.SEGMENTS) || segmentsFile.Equals(IndexFileNames.SEGMENTS_GEN)) { throw new InvalidOperationException( string.Format("last file to copy+sync must be segments_N but got {0}; check your Revision implementation!", segmentsFile)); } return segmentsFile; } /// <summary> /// Cleanup the index directory by deleting all given files. Called when file /// copy or sync failed. /// </summary> public static void CleanupFilesOnFailure(Directory directory, IList<string> files) { foreach (string file in files) { try { directory.DeleteFile(file); } catch { // suppress any exception because if we're here, it means copy // failed, and we must cleanup after ourselves. } } } /// <summary> /// Cleans up the index directory from old index files. This method uses the /// last commit found by <see cref="GetLastCommit(Directory)"/>. If it matches the /// expected <paramref name="segmentsFile"/>, then all files not referenced by this commit point /// are deleted. /// </summary> /// <remarks> /// <b>NOTE:</b> This method does a best effort attempt to clean the index /// directory. It suppresses any exceptions that occur, as this can be retried /// the next time. /// </remarks> public static void CleanupOldIndexFiles(Directory directory, string segmentsFile) { try { IndexCommit commit = GetLastCommit(directory); // commit == null means weird IO errors occurred, ignore them // if there were any IO errors reading the expected commit point (i.e. // segments files mismatch), then ignore that commit either. if (commit != null && commit.SegmentsFileName.Equals(segmentsFile)) { HashSet<string> commitFiles = new HashSet<string>( commit.FileNames .Union(new[] {IndexFileNames.SEGMENTS_GEN})); Regex matcher = IndexFileNames.CODEC_FILE_PATTERN; foreach (string file in directory.ListAll() .Where(file => !commitFiles.Contains(file) && (matcher.IsMatch(file) || file.StartsWith(IndexFileNames.SEGMENTS)))) { try { directory.DeleteFile(file); } catch { // suppress, it's just a best effort } } } } catch { // ignore any errors that happens during this state and only log it. this // cleanup will have a chance to succeed the next time we get a new // revision. } } /// <summary> /// Copies the provided list of files from the <paramref name="source"/> <see cref="Directory"/> to the /// <paramref name="target"/> <see cref="Directory"/>, if they are not the same. 
/// </summary> /// <exception cref="IOException"></exception> public static void CopyFiles(Directory source, Directory target, IList<string> files) { if (source.Equals(target)) return; foreach (string file in files) source.Copy(target, file, file, IOContext.READ_ONCE); } /// <summary> /// Writes <see cref="IndexFileNames.SEGMENTS_GEN"/> file to the directory, reading /// the generation from the given <paramref name="segmentsFile"/>. If it is <c>null</c>, /// this method deletes segments.gen from the directory. /// </summary> public static void WriteSegmentsGen(string segmentsFile, Directory directory) { if (segmentsFile != null) { SegmentInfos.WriteSegmentsGen(directory, SegmentInfos.GenerationFromSegmentsFileName(segmentsFile)); return; } try { directory.DeleteFile(IndexFileNames.SEGMENTS_GEN); } catch { // suppress any errors while deleting this file. } } /// <summary> /// Constructor with the given index directory and callback to notify when the /// indexes were updated. /// </summary> public IndexReplicationHandler(Directory indexDirectory, Func<bool?> callback) // LUCENENET TODO: API - shouldn't this be Action ? { this.InfoStream = InfoStream.Default; this.callback = callback; this.indexDirectory = indexDirectory; currentVersion = null; currentRevisionFiles = null; if (DirectoryReader.IndexExists(indexDirectory)) { IList<IndexCommit> commits = DirectoryReader.ListCommits(indexDirectory); IndexCommit commit = commits.Last(); currentVersion = IndexRevision.RevisionVersion(commit); currentRevisionFiles = IndexRevision.RevisionFiles(commit); WriteToInfoStream( string.Format("constructor(): currentVersion={0} currentRevisionFiles={1}", currentVersion, currentRevisionFiles), string.Format("constructor(): commit={0}", commit)); } } public virtual string CurrentVersion { get { return currentVersion; } } public virtual IDictionary<string, IList<RevisionFile>> CurrentRevisionFiles { get { return currentRevisionFiles; } } public virtual void RevisionReady(string version, IDictionary<string, IList<RevisionFile>> revisionFiles, IDictionary<string, IList<string>> copiedFiles, IDictionary<string, Directory> sourceDirectory) { if (revisionFiles.Count > 1) throw new ArgumentException(string.Format("this handler handles only a single source; got {0}", revisionFiles.Keys)); Directory clientDirectory = sourceDirectory.Values.First(); IList<string> files = copiedFiles.Values.First(); string segmentsFile = GetSegmentsFile(files, false); bool success = false; try { // copy files from the client to index directory CopyFiles(clientDirectory, indexDirectory, files); // fsync all copied files (except segmentsFile) indexDirectory.Sync(files); // now copy and fsync segmentsFile clientDirectory.Copy(indexDirectory, segmentsFile, segmentsFile, IOContext.READ_ONCE); indexDirectory.Sync(new[] { segmentsFile }); success = true; } finally { if (!success) { files.Add(segmentsFile); // add it back so it gets deleted too CleanupFilesOnFailure(indexDirectory, files); } } // all files have been successfully copied + sync'd. update the handler's state currentRevisionFiles = revisionFiles; currentVersion = version; WriteToInfoStream(string.Format("revisionReady(): currentVersion={0} currentRevisionFiles={1}", currentVersion, currentRevisionFiles)); // update the segments.gen file WriteSegmentsGen(segmentsFile, indexDirectory); // Cleanup the index directory from old and unused index files. // NOTE: we don't use IndexWriter.deleteUnusedFiles here since it may have // side-effects, e.g. 
if it hits sudden IO errors while opening the index // (and can end up deleting the entire index). It is not our job to protect // against those errors, app will probably hit them elsewhere. CleanupOldIndexFiles(indexDirectory, segmentsFile); // successfully updated the index, notify the callback that the index is // ready. if (callback != null) { try { callback.Invoke(); } catch (Exception e) { throw new IOException(e.ToString(), e); } } } /// <summary> /// Gets or sets the <see cref="Util.InfoStream"/> to use for logging messages. /// </summary> public virtual InfoStream InfoStream { get { return infoStream; } set { infoStream = value ?? InfoStream.NO_OUTPUT; } } } }
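// Illustrative usage sketch (an assumption about typical wiring, not part of
// the original file above): an IndexReplicationHandler over a local FSDirectory
// whose callback refreshes a SearcherManager once a revision has been copied,
// synced and published. 'indexPath' and 'searcherManager' are placeholders.
using System.IO;
using Lucene.Net.Replicator;
using Lucene.Net.Search;
using Directory = Lucene.Net.Store.Directory;

internal static class IndexReplicationHandlerUsageSketch
{
    internal static IReplicationHandler Create(string indexPath, SearcherManager searcherManager)
    {
        Directory indexDirectory = Lucene.Net.Store.FSDirectory.Open(new DirectoryInfo(indexPath));
        return new IndexReplicationHandler(indexDirectory, () =>
        {
            // Runs after RevisionReady has copied and fsync'd the files and
            // updated segments.gen; reopen searchers so they see the new commit.
            searcherManager.MaybeRefresh();
            return true;
        });
    }
}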
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.Linq; using System.Management.Automation.Language; using System.Management.Automation.Runspaces; namespace System.Management.Automation { // A visitor to walk an AST and validate that it can be converted to PowerShell. internal class ScriptBlockToPowerShellChecker : AstVisitor { private readonly HashSet<string> _validVariables = new HashSet<string>(StringComparer.OrdinalIgnoreCase); internal ScriptBlockAst ScriptBeingConverted { get; set; } internal bool UsesParameter { get; private set; } internal bool HasUsingExpr { get; private set; } public override AstVisitAction VisitParameter(ParameterAst parameterAst) { if (parameterAst.Name.VariablePath.IsAnyLocal()) { _validVariables.Add(parameterAst.Name.VariablePath.UnqualifiedPath); } return AstVisitAction.Continue; } public override AstVisitAction VisitPipeline(PipelineAst pipelineAst) { if (pipelineAst.PipelineElements[0] is CommandExpressionAst) { // If the first element is a CommandExpression, this pipeline should be the value // of a parameter. We want to avoid a scriptblock that contains only a pure expression. // The check "pipelineAst.Parent.Parent == ScriptBeingConverted" guarantees we throw // error on that kind of scriptblock. // Disallow pure expressions at the "top" level, but allow them otherwise. // We want to catch: // 1 | echo // But we don't want to error out on: // echo $(1) // See the comment in VisitCommand on why it's safe to check Parent.Parent, we // know that we have at least: // * a NamedBlockAst (the end block) // * a ScriptBlockAst (the ast we're comparing to) if (pipelineAst.GetPureExpression() == null || pipelineAst.Parent.Parent == ScriptBeingConverted) { ThrowError( new ScriptBlockToPowerShellNotSupportedException( "CantConvertPipelineStartsWithExpression", null, AutomationExceptions.CantConvertPipelineStartsWithExpression), pipelineAst); } } return AstVisitAction.Continue; } public override AstVisitAction VisitCommand(CommandAst commandAst) { if (commandAst.InvocationOperator == TokenKind.Dot) { ThrowError( new ScriptBlockToPowerShellNotSupportedException( "CantConvertWithDotSourcing", null, AutomationExceptions.CantConvertWithDotSourcing), commandAst); } // Up front checking ensures that we have a simple script block, // so we can safely assume that the parents are: // * a PipelineAst // * a NamedBlockAst (the end block) // * a ScriptBlockAst (the ast we're comparing to) // If that isn't the case, the conversion isn't allowed. It // is also safe to assume that we have at least 3 parents, a script block can't be simpler. 
if (commandAst.Parent.Parent.Parent != ScriptBeingConverted) { ThrowError( new ScriptBlockToPowerShellNotSupportedException( "CantConvertWithCommandInvocations", null, AutomationExceptions.CantConvertWithCommandInvocations), commandAst); } if (commandAst.CommandElements[0] is ScriptBlockExpressionAst) { ThrowError( new ScriptBlockToPowerShellNotSupportedException( "CantConvertWithScriptBlockInvocation", null, AutomationExceptions.CantConvertWithScriptBlockInvocation), commandAst); } return AstVisitAction.Continue; } public override AstVisitAction VisitMergingRedirection(MergingRedirectionAst redirectionAst) { if (redirectionAst.ToStream != RedirectionStream.Output) { ThrowError( new ScriptBlockToPowerShellNotSupportedException( "CanConvertOneOutputErrorRedir", null, AutomationExceptions.CanConvertOneOutputErrorRedir), redirectionAst); } return AstVisitAction.Continue; } public override AstVisitAction VisitFileRedirection(FileRedirectionAst redirectionAst) { ThrowError( new ScriptBlockToPowerShellNotSupportedException( "CanConvertOneOutputErrorRedir", null, AutomationExceptions.CanConvertOneOutputErrorRedir), redirectionAst); return AstVisitAction.Continue; } public override AstVisitAction VisitVariableExpression(VariableExpressionAst variableExpressionAst) { bool usesParameterReference = this.UsesParameter; bool ok = variableExpressionAst.IsSafeVariableReference(_validVariables, ref usesParameterReference); if (usesParameterReference != this.UsesParameter) { this.UsesParameter = usesParameterReference; } if (!ok) { ThrowError(new ScriptBlockToPowerShellNotSupportedException( "CantConvertWithUndeclaredVariables", null, AutomationExceptions.CantConvertWithUndeclaredVariables, variableExpressionAst.VariablePath), variableExpressionAst); } return AstVisitAction.Continue; } public override AstVisitAction VisitScriptBlockExpression(ScriptBlockExpressionAst scriptBlockExpressionAst) { ThrowError(new ScriptBlockToPowerShellNotSupportedException( "CantConvertWithScriptBlocks", null, AutomationExceptions.CantConvertWithScriptBlocks), scriptBlockExpressionAst); return AstVisitAction.SkipChildren; } public override AstVisitAction VisitUsingExpression(UsingExpressionAst usingExpressionAst) { // A using expression is always allowed, it simply gets re-written to be a parameter HasUsingExpr = true; // Skip the children - the expression is evaluated before sending to the remote machine, // so it doesn't matter what we might find in the children. return AstVisitAction.SkipChildren; } internal static void ThrowError(ScriptBlockToPowerShellNotSupportedException ex, Ast ast) { InterpreterError.UpdateExceptionErrorRecordPosition(ex, ast.Extent); throw ex; } } internal class UsingExpressionAstSearcher : AstSearcher { internal static IEnumerable<Ast> FindAllUsingExpressionExceptForWorkflow(Ast ast) { Diagnostics.Assert(ast != null, "caller to verify arguments"); var searcher = new UsingExpressionAstSearcher(astParam => astParam is UsingExpressionAst, stopOnFirst: false, searchNestedScriptBlocks: true); ast.InternalVisit(searcher); return searcher.Results; } private UsingExpressionAstSearcher(Func<Ast, bool> callback, bool stopOnFirst, bool searchNestedScriptBlocks) : base(callback, stopOnFirst, searchNestedScriptBlocks) { } public override AstVisitAction VisitFunctionDefinition(FunctionDefinitionAst ast) { // Skip the workflow. 
We are not interested in the UsingExpressions in a workflow if (ast.IsWorkflow) { return AstVisitAction.SkipChildren; } return CheckScriptBlock(ast); } } /// <summary> /// Converts a ScriptBlock to a PowerShell object by traversing the /// given Ast. /// </summary> internal class ScriptBlockToPowerShellConverter { private readonly PowerShell _powershell; private ExecutionContext _context; private Dictionary<string, object> _usingValueMap; private bool? _createLocalScope; private ScriptBlockToPowerShellConverter() { _powershell = PowerShell.Create(); } internal static PowerShell Convert(ScriptBlockAst body, ReadOnlyCollection<ParameterAst> functionParameters, bool isTrustedInput, ExecutionContext context, Dictionary<string, object> variables, bool filterNonUsingVariables, bool? createLocalScope, object[] args) { ExecutionContext.CheckStackDepth(); if (args == null) { args = Utils.EmptyArray<object>(); } // Perform validations on the ScriptBlock. GetSimplePipeline can allow for more than one // pipeline if the first parameter is true, but Invoke-Command doesn't yet support multiple // pipelines in a PowerShell (it just grabs the last command directly.) The rest of this // code properly supports multiple pipelines, so it should just work to change the false to true // if/when Invoke-Command can support multiple pipelines. string errorId; string errorMsg; body.GetSimplePipeline(true, out errorId, out errorMsg); if (errorId != null) { throw new ScriptBlockToPowerShellNotSupportedException(errorId, null, errorMsg); } var checker = new ScriptBlockToPowerShellChecker { ScriptBeingConverted = body }; if (functionParameters != null) { foreach (var parameter in functionParameters) { parameter.InternalVisit(checker); } } body.InternalVisit(checker); // When the context is null (or they haven't supplied any variables), throw, but only if we really need the // context (basically, if we have some variable reference to resolve). if (context == null && (checker.HasUsingExpr || checker.UsesParameter) && (variables == null)) { throw new PSInvalidOperationException(AutomationExceptions.CantConvertScriptBlockWithNoContext); } try { var converter = new ScriptBlockToPowerShellConverter { _context = context, _createLocalScope = createLocalScope }; if (checker.HasUsingExpr) { converter._usingValueMap = GetUsingValues(body, isTrustedInput, context, variables, filterNonUsingVariables).Item1; } if (checker.UsesParameter) { // If any parameters are used, we create a new scope and bind the parameters. var newScope = context.EngineSessionState.NewScope(false); context.EngineSessionState.CurrentScope = newScope; context.EngineSessionState.CurrentScope.ScopeOrigin = CommandOrigin.Internal; var locals = MutableTuple.MakeTuple(Compiler.DottedLocalsTupleType, Compiler.DottedLocalsNameIndexMap); // Get the parameter metadata for the script block. // If 'functionParameters' is not null, then the ScriptBlockAst is actually the body of a FunctionDefinitionAst, and it doesn't have a ParamBlock. // If 'functionParameters' is null, then the ScriptBlockAst may have parameters defined in its ParamBlock. bool usesCmdletBinding = false; var parameters = functionParameters != null ? 
Compiler.GetParameterMetaData(functionParameters, true, ref usesCmdletBinding) : ((IParameterMetadataProvider)body).GetParameterMetadata(true, ref usesCmdletBinding); object[] remainingArgs = ScriptBlock.BindArgumentsForScriptblockInvoke( (RuntimeDefinedParameter[])parameters.Data, args, context, false, null, locals); locals.SetAutomaticVariable(AutomaticVariable.Args, remainingArgs, context); newScope.LocalsTuple = locals; } foreach (var pipeline in body.EndBlock.Statements.OfType<PipelineAst>()) { converter._powershell.AddStatement(); converter.ConvertPipeline(pipeline, isTrustedInput); } return converter._powershell; } finally { if (checker.UsesParameter) { context.EngineSessionState.RemoveScope(context.EngineSessionState.CurrentScope); } } } /// <summary> /// Get using values in the dictionary form. /// </summary> internal static Dictionary<string, object> GetUsingValuesAsDictionary(ScriptBlock scriptBlock, bool isTrustedInput, ExecutionContext context, Dictionary<string, object> variables) { return GetUsingValues(scriptBlock.Ast, isTrustedInput, context, variables, false).Item1; } /// <summary> /// Get using values in the array form. /// </summary> internal static object[] GetUsingValuesAsArray(ScriptBlock scriptBlock, bool isTrustedInput, ExecutionContext context, Dictionary<string, object> variables) { return GetUsingValues(scriptBlock.Ast, isTrustedInput, context, variables, false).Item2; } /// <summary> /// Collect values for UsingExpressions, in the form of a dictionary and an array. /// - The dictionary form is used when the remote server is PSv5 and later version for handling UsingExpression in Invoke-Command/Start-Job /// - The array form is used when the remote server is PSv3 and PSv4 for handling UsingExpression in Invoke-Command. /// </summary> /// <remarks> /// We still keep the array-form using values because we want to avoid any breaking changes when running Invoke-Command /// targeting PSv3 or PSv4 remote end -- if UsingExpressions are all in the same scope, then we still pass an array of using /// values to the remote end; otherwise, we will handle UsingExpression as if the remote end is PSv2. /// </remarks> /// <returns> /// A tuple of the dictionary-form and the array-form using values. /// If the array-form using value is null, then there are UsingExpressions used in different scopes. /// </returns> private static Tuple<Dictionary<string, object>, object[]> GetUsingValues(Ast body, bool isTrustedInput, ExecutionContext context, Dictionary<string, object> variables, bool filterNonUsingVariables) { Diagnostics.Assert(context != null || variables != null, "can't retrieve variables with no context and no variables"); var usingAsts = UsingExpressionAstSearcher.FindAllUsingExpressionExceptForWorkflow(body).ToList(); var usingValueArray = new object[usingAsts.Count]; var usingValueMap = new Dictionary<string, object>(usingAsts.Count); HashSet<string> usingVarNames = (variables != null && filterNonUsingVariables) ? new HashSet<string>() : null; // Used to check if the PSv3/PSv4 way of handling UsingExpression can continue to be used. 
bool hasUsingExprInDifferentScope = false; ScriptBlockAst sbClosestToPreUsingExpr = null; UsingExpressionAst usingAst = null; Version oldStrictVersion = null; try { if (context != null) { oldStrictVersion = context.EngineSessionState.CurrentScope.StrictModeVersion; context.EngineSessionState.CurrentScope.StrictModeVersion = PSVersionInfo.PSVersion; } for (int i = 0; i < usingAsts.Count; ++i) { usingAst = (UsingExpressionAst)usingAsts[i]; object value = null; // This happens only when GetUsingValues gets called outside the ScriptBlockToPowerShellConverter class if (!hasUsingExprInDifferentScope && HasUsingExpressionsInDifferentScopes(usingAst, body, ref sbClosestToPreUsingExpr)) { // If there are UsingExpressions in different scopes, the array-form using values will not be useful // even if the remote end is PSv3 or PSv4, because the way we handle using expression in PSv3 and PSv4 // doesn't support UsingExpression in different scopes. In this case, we will set the array-form using // value to be null before return. // // Note that this check only affect array-form using value. In PSv5, we change the way to handle UsingExpression // on both client and server sides. The dictionary-form using values is used and UsingExpression in different // scope is supported. hasUsingExprInDifferentScope = true; } if (variables != null) { var variableAst = usingAst.SubExpression as VariableExpressionAst; if (variableAst == null) { throw InterpreterError.NewInterpreterException(null, typeof(RuntimeException), usingAst.Extent, "CantGetUsingExpressionValueWithSpecifiedVariableDictionary", AutomationExceptions.CantGetUsingExpressionValueWithSpecifiedVariableDictionary, usingAst.Extent.Text); } string varName = variableAst.VariablePath.UserPath; if (varName != null && variables.TryGetValue(varName, out value) && usingVarNames != null) { usingVarNames.Add(varName); } } else { value = Compiler.GetExpressionValue(usingAst.SubExpression, isTrustedInput, context); } // Collect UsingExpression value as an array usingValueArray[i] = value; // Collect UsingExpression value as a dictionary string usingAstKey = PsUtils.GetUsingExpressionKey(usingAst); if (!usingValueMap.ContainsKey(usingAstKey)) { usingValueMap.Add(usingAstKey, value); } } } catch (RuntimeException rte) { if (rte.ErrorRecord.FullyQualifiedErrorId.Equals("VariableIsUndefined", StringComparison.Ordinal)) { throw InterpreterError.NewInterpreterException(null, typeof(RuntimeException), usingAst.Extent, "UsingVariableIsUndefined", AutomationExceptions.UsingVariableIsUndefined, rte.ErrorRecord.TargetObject); } else if (rte.ErrorRecord.FullyQualifiedErrorId.Equals("CantGetUsingExpressionValueWithSpecifiedVariableDictionary", StringComparison.Ordinal)) { throw; } } finally { if (context != null) { context.EngineSessionState.CurrentScope.StrictModeVersion = oldStrictVersion; } } if (usingVarNames != null) { string[] keys = variables.Keys.ToArray(); foreach (string key in keys) { if (!usingVarNames.Contains(key)) { variables.Remove(key); } } } if (hasUsingExprInDifferentScope) { usingValueArray = null; } return Tuple.Create(usingValueMap, usingValueArray); } /// <summary> /// Check if the given UsingExpression is in a different scope from the previous UsingExpression that we analyzed. /// </summary> /// <remarks> /// Note that the value of <paramref name="usingExpr"/> is retrieved by calling 'UsingExpressionAstSearcher.FindAllUsingExpressionExceptForWorkflow'. /// So <paramref name="usingExpr"/> is guaranteed not inside a workflow. 
/// </remarks> /// <param name="usingExpr">The UsingExpression to analyze.</param> /// <param name="topLevelParent">The top level Ast, should be either ScriptBlockAst or FunctionDefinitionAst.</param> /// <param name="sbClosestToPreviousUsingExpr">The ScriptBlockAst that represents the scope of the previously analyzed UsingExpressions.</param> private static bool HasUsingExpressionsInDifferentScopes(UsingExpressionAst usingExpr, Ast topLevelParent, ref ScriptBlockAst sbClosestToPreviousUsingExpr) { Diagnostics.Assert(topLevelParent is ScriptBlockAst || topLevelParent is FunctionDefinitionAst, "the top level parent should be either a ScriptBlockAst or FunctionDefinitionAst"); // Scan up the parents of a UsingExpression to check if it's in a nested function/filter/ScriptBlock Ast current = usingExpr; do { current = current.Parent; var sbAst = current as ScriptBlockAst; if (sbAst != null) { // We find the closest parent ScriptBlockAst of the current UsingExpression, which represents the scope // that the current UsingExpression is in. if (sbClosestToPreviousUsingExpr == null) { // The current UsingExpression is the first one to be analyzed. sbClosestToPreviousUsingExpr = sbAst; return false; } if (sbAst == sbClosestToPreviousUsingExpr) { // The current UsingExpression is in the same scope as the previous UsingExpression we analyzed. return false; } // The current UsingExpression is in a different scope from the previous UsingExpression we analyzed. return true; } var funcAst = current as FunctionDefinitionAst; if (funcAst != null) { // The parent chain of the current UsingExpression reaches a FunctionDefinitionAst, then the UsingExpression // must be in 'Parameters' property of this FunctionDefinitionAst. // In this case, the 'Body' of this FunctionDefinitionAst represents the scope that the UsingExpression is in. if (sbClosestToPreviousUsingExpr == null) { // The current UsingExpression is the first one to be analyzed. sbClosestToPreviousUsingExpr = funcAst.Body; return false; } if (funcAst.Body == sbClosestToPreviousUsingExpr) { // The current UsingExpression is in the same scope as the previous UsingExpression we analyzed. return false; } // The current UsingExpression is in a different scope from the previous UsingExpression we analyzed. return true; } } while (current != topLevelParent); Diagnostics.Assert(false, "Unreachable Code. Top level parent is eitehr ScriptBlockAst or FunctionDefinitionAst, so it should return within the loop for sure."); // I don't think it's reachable, but if it happens, just assume there are UsingExpressions in different scopes. return true; } private void ConvertPipeline(PipelineAst pipelineAst, bool isTrustedInput) { foreach (var command in pipelineAst.PipelineElements) { ConvertCommand((CommandAst)command, isTrustedInput); } } private void ConvertCommand(CommandAst commandAst, bool isTrustedInput) { // First need command name. var commandName = GetCommandName(commandAst.CommandElements[0], isTrustedInput); var command = new Command(commandName, isScript: false, useLocalScope: _createLocalScope); // Handle redirections, if any (there can really be just 0 or 1). 
if (commandAst.Redirections.Count > 0) { Diagnostics.Assert(commandAst.Redirections.Count == 1, "only 1 kind of redirection is supported"); Diagnostics.Assert(commandAst.Redirections[0] is MergingRedirectionAst, "unexpected redirection type"); PipelineResultTypes toType = PipelineResultTypes.Output; PipelineResultTypes fromType; switch (commandAst.Redirections[0].FromStream) { case RedirectionStream.Error: fromType = PipelineResultTypes.Error; break; case RedirectionStream.Warning: fromType = PipelineResultTypes.Warning; break; case RedirectionStream.Verbose: fromType = PipelineResultTypes.Verbose; break; case RedirectionStream.Debug: fromType = PipelineResultTypes.Debug; break; case RedirectionStream.Information: fromType = PipelineResultTypes.Information; break; case RedirectionStream.All: fromType = PipelineResultTypes.All; break; default: // Default to Error->Output to be compatible with V2. fromType = PipelineResultTypes.Error; break; } command.MergeMyResults(fromType, toType); } _powershell.AddCommand(command); // Now the parameters and arguments. foreach (var ast in commandAst.CommandElements.Skip(1)) { var exprAst = ast as ExpressionAst; if (exprAst != null) { VariableExpressionAst variableAst = null; var usingExprAst = ast as UsingExpressionAst; if (usingExprAst != null) { string usingAstKey = PsUtils.GetUsingExpressionKey(usingExprAst); object usingValue = _usingValueMap[usingAstKey]; variableAst = usingExprAst.SubExpression as VariableExpressionAst; if (variableAst != null && variableAst.Splatted) { // Support the splatting of a dictionary var parameters = usingValue as System.Collections.IDictionary; if (parameters != null) { _powershell.AddParameters(parameters); } else { // Support the splatting of an array var arguments = usingValue as System.Collections.IEnumerable; if (arguments != null) { foreach (Object argument in arguments) { _powershell.AddArgument(argument); } } else { // Splat the object directly. _powershell.AddArgument(usingValue); } } } else { _powershell.AddArgument(usingValue); } continue; } variableAst = ast as VariableExpressionAst; if (variableAst != null && variableAst.Splatted) { GetSplattedVariable(variableAst); } else { var constantExprAst = ast as ConstantExpressionAst; object argument; if (constantExprAst != null && LanguagePrimitives.IsNumeric(LanguagePrimitives.GetTypeCode(constantExprAst.StaticType))) { var commandArgumentText = constantExprAst.Extent.Text; argument = constantExprAst.Value; if (!commandArgumentText.Equals(constantExprAst.Value.ToString(), StringComparison.Ordinal)) { // The wrapped number will actually return a PSObject which could end holding a reference to // a typetable, making the object runspace specific. We should find a better way to avoid // any possibility of sharing problems, though this is unlikely to cause problems. 
argument = ParserOps.WrappedNumber(argument, commandArgumentText); } } else { if (!isTrustedInput) { try { argument = GetSafeValueVisitor.GetSafeValue(exprAst, _context, GetSafeValueVisitor.SafeValueContext.GetPowerShell); } catch (System.Exception) { throw new ScriptBlockToPowerShellNotSupportedException( "CantConvertWithDynamicExpression", null, AutomationExceptions.CantConvertWithDynamicExpression, exprAst.Extent.Text); } } else { argument = GetExpressionValue(exprAst, isTrustedInput); } } _powershell.AddArgument(argument); } } else { AddParameter((CommandParameterAst)ast, isTrustedInput); } } } private string GetCommandName(CommandElementAst commandNameAst, bool isTrustedInput) { var exprAst = commandNameAst as ExpressionAst; string commandName; if (exprAst != null) { var value = GetExpressionValue(exprAst, isTrustedInput); if (value == null) { ScriptBlockToPowerShellChecker.ThrowError( new ScriptBlockToPowerShellNotSupportedException( "CantConvertWithScriptBlockInvocation", null, AutomationExceptions.CantConvertWithScriptBlockInvocation), exprAst); } if (value is CommandInfo) { commandName = ((CommandInfo)value).Name; } else { commandName = value as string; } } else { // If this assertion fires, the command name is determined incorrectly. Diagnostics.Assert(commandNameAst is CommandParameterAst, "Unexpected element not handled correctly."); commandName = commandNameAst.Extent.Text; } if (string.IsNullOrWhiteSpace(commandName)) { // TODO: could use a better error here throw new ScriptBlockToPowerShellNotSupportedException( "CantConvertWithScriptBlockInvocation", null, AutomationExceptions.CantConvertWithScriptBlockInvocation); } return commandName; } private void GetSplattedVariable(VariableExpressionAst variableAst) { if (_context == null) { throw new PSInvalidOperationException(AutomationExceptions.CantConvertScriptBlockWithNoContext); } // Process the contents of a splatted variable into the arguments for this // command. If the variable contains a hashtable, distribute the key/value pairs // If it's an enumerable, then distribute the values as $args and finally // if it's a scalar, then the effect is equivalent to $var object splattedValue = _context.GetVariableValue(variableAst.VariablePath); foreach (var splattedParameter in PipelineOps.Splat(splattedValue, variableAst)) { CommandParameter publicParameter = CommandParameter.FromCommandParameterInternal(splattedParameter); _powershell.AddParameter(publicParameter.Name, publicParameter.Value); } } private object GetExpressionValue(ExpressionAst exprAst, bool isTrustedInput) { // be sure that there's a context at hand if (_context == null) { var rs = RunspaceFactory.CreateRunspace(InitialSessionState.Create()); rs.Open(); _context = rs.ExecutionContext; } if (!isTrustedInput) // if it's not trusted, call the safe value visitor { return GetSafeValueVisitor.GetSafeValue(exprAst, _context, GetSafeValueVisitor.SafeValueContext.GetPowerShell); } return Compiler.GetExpressionValue(exprAst, isTrustedInput, _context, _usingValueMap); } private void AddParameter(CommandParameterAst commandParameterAst, bool isTrustedInput) { string nameSuffix; object argument; if (commandParameterAst.Argument != null) { var arg = commandParameterAst.Argument; var errorPos = commandParameterAst.ErrorPosition; bool spaceAfterParameter = (errorPos.EndLineNumber != arg.Extent.StartLineNumber || errorPos.EndColumnNumber != arg.Extent.StartColumnNumber); nameSuffix = spaceAfterParameter ? 
": " : ":"; argument = GetExpressionValue(commandParameterAst.Argument, isTrustedInput); } else { nameSuffix = string.Empty; argument = null; } // first character in parameter name must be a dash _powershell.AddParameter( string.Format(CultureInfo.InvariantCulture, "-{0}{1}", commandParameterAst.ParameterName, nameSuffix), argument); } } }
#region License // Copyright (c) 2010-2019, Mark Final // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of BuildAMation nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #endregion // License using System.Linq; namespace C { /// <summary> /// Utility class for accessing toolchain registration /// </summary> static class DefaultToolchain { private static readonly Options.DefaultToolchainCommand SelectDefaultToolChainCommand = new Options.DefaultToolchainCommand(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> C_Compilers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> Cxx_Compilers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> Archivers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> C_Linkers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> Cxx_Linkers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> ObjectiveC_Compilers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> ObjectiveCxx_Compilers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> WinResourceCompilers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> Assemblers = new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> Preprocessors = 
new System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray>(); private static readonly string UserToolchainOverride = null; // name of the toolchain to use, after disambiguation private static readonly System.Collections.Generic.Dictionary<EBit, string> DisambiguousToolchainToUse = new System.Collections.Generic.Dictionary<EBit, string>(); // cache of the tool modules for a particular toolchain private sealed class ToolModules { public CompilerTool c_compiler = null; public CompilerTool cxx_compiler = null; public LibrarianTool librarian = null; public LinkerTool c_linker = null; public LinkerTool cxx_linker = null; public CompilerTool objc_compiler = null; public CompilerTool objcxx_compiler = null; public WinResourceCompilerTool winres_compiler = null; public AssemblerTool assembler = null; public PreprocessorTool preprocessor = null; }; private static readonly System.Collections.Generic.Dictionary<EBit, ToolModules> Default = new System.Collections.Generic.Dictionary<EBit, ToolModules>(); private static System.Collections.Generic.IEnumerable<System.Tuple<System.Type,T>> GetToolsFromMetaData<T>() where T : ToolRegistrationAttribute { var discoverAllToolchains = Bam.Core.CommandLineProcessor.Evaluate(new Options.DiscoverAllToolchains()); var allTypes = Bam.Core.Graph.Instance.ScriptAssembly.GetTypes(); foreach (var type in allTypes) { var tools = type.GetCustomAttributes(typeof(T), false) as T[]; if (0 == tools.Length) { continue; } foreach (var tool in tools) { if (!discoverAllToolchains && Bam.Core.OSUtilities.CurrentOS != tool.Platform) { continue; } yield return new System.Tuple<System.Type, T>(type, tool); } } } private static void FindTools<AttributeType, ToolType>( System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> collection) where AttributeType : ToolRegistrationAttribute where ToolType : Bam.Core.PreBuiltTool { var graph = Bam.Core.Graph.Instance; foreach (var toolData in GetToolsFromMetaData<AttributeType>()) { var bits = toolData.Item2.BitDepth; if (!collection.ContainsKey(bits)) { collection[bits] = new Bam.Core.TypeArray(toolData.Item1); } else { collection[bits].AddUnique(toolData.Item1); } } } static DefaultToolchain() { FindTools<RegisterCCompilerAttribute, CompilerTool>(C_Compilers); FindTools<RegisterCxxCompilerAttribute, CompilerTool>(Cxx_Compilers); FindTools<RegisterLibrarianAttribute, LibrarianTool>(Archivers); FindTools<RegisterCLinkerAttribute, LinkerTool>(C_Linkers); FindTools<RegisterCxxLinkerAttribute, LinkerTool>(Cxx_Linkers); FindTools<RegisterObjectiveCCompilerAttribute, CompilerTool>(ObjectiveC_Compilers); FindTools<RegisterObjectiveCxxCompilerAttribute, CompilerTool>(ObjectiveCxx_Compilers); FindTools<RegisterWinResourceCompilerAttribute, WinResourceCompilerTool>(WinResourceCompilers); FindTools<RegisterAssemblerAttribute, AssemblerTool>(Assemblers); FindTools<RegisterPreprocessorAttribute, PreprocessorTool>(Preprocessors); // disambiguate any bitdepths with multiple tool types // any bit depths that remain ambiguous have no entry in DisambiguousToolchainToUse // and exceptions are raised if and when they are used (not now) UserToolchainOverride = Bam.Core.CommandLineProcessor.Evaluate(SelectDefaultToolChainCommand); foreach (EBit bitDepth in System.Enum.GetValues(typeof(EBit))) { // always add an empty ToolModules for each bitdepth - the fields of which are filled out // if and when the specific tools are requested Default.Add(bitDepth, new ToolModules()); if (!C_Compilers.ContainsKey(bitDepth) || !C_Compilers[bitDepth].Any()) { // 
all bets are off if there's not even a C compiler continue; } var ambiguous_toolchain = (C_Compilers.ContainsKey(bitDepth) && C_Compilers[bitDepth].Skip(1).Any()) || (Cxx_Compilers.ContainsKey(bitDepth) && Cxx_Compilers[bitDepth].Skip(1).Any()) || (Archivers.ContainsKey(bitDepth) && Archivers[bitDepth].Skip(1).Any()) || (C_Linkers.ContainsKey(bitDepth) && C_Linkers[bitDepth].Skip(1).Any()) || (Cxx_Linkers.ContainsKey(bitDepth) && Cxx_Linkers[bitDepth].Skip(1).Any()) || (ObjectiveC_Compilers.ContainsKey(bitDepth) && ObjectiveC_Compilers[bitDepth].Skip(1).Any()) || (ObjectiveCxx_Compilers.ContainsKey(bitDepth) && ObjectiveCxx_Compilers[bitDepth].Skip(1).Any()) || (WinResourceCompilers.ContainsKey(bitDepth) && WinResourceCompilers[bitDepth].Skip(1).Any()) || (Assemblers.ContainsKey(bitDepth) && Assemblers[bitDepth].Skip(1).Any()) || (Preprocessors.ContainsKey(bitDepth) && Preprocessors[bitDepth].Skip(1).Any()); if (ambiguous_toolchain) { if (UserToolchainOverride != null) { foreach (var toolTypeToUse in C_Compilers[bitDepth]) { var attr = toolTypeToUse.GetCustomAttributes(false); var toolToolSet = (attr[0] as ToolRegistrationAttribute).ToolsetName; if (toolToolSet.Equals(UserToolchainOverride, System.StringComparison.Ordinal)) { DisambiguousToolchainToUse.Add(bitDepth, toolToolSet); break; } } } } else { var toolTypeToUse = C_Compilers[bitDepth][0]; var attr = toolTypeToUse.GetCustomAttributes(false); var toolToolSet = (attr[0] as ToolRegistrationAttribute).ToolsetName; DisambiguousToolchainToUse.Add(bitDepth, toolToolSet); } } } private static ToolType GetTool<ToolType>( System.Collections.Generic.Dictionary<EBit, Bam.Core.TypeArray> collection, EBit bitDepth, string toolDescription, ref ToolType toolModule) where ToolType : Bam.Core.PreBuiltTool { if (null != toolModule) { return toolModule; } if (!collection.Any()) { if (!DisambiguousToolchainToUse.Any()) { throw new Bam.Core.Exception( "No toolchain packages were detected for this platform" ); } throw new Bam.Core.Exception( $"Platform contains toolchains, but no registrations of a '{toolDescription}' were found" ); } if (!DisambiguousToolchainToUse.ContainsKey(bitDepth)) { var candidates = collection[bitDepth]; var tooManyInstance = new System.Text.StringBuilder(); tooManyInstance.AppendLine( $"There are {candidates.Count} {toolDescription}s available for this platform in {(int)bitDepth}-bits. 
Resolve using the command line option {(SelectDefaultToolChainCommand as Bam.Core.ICommandLineArgument).LongName}=<choice>" ); foreach (var tool in candidates) { tooManyInstance.AppendLine($"\t{tool.ToString()}"); } throw new Bam.Core.Exception(tooManyInstance.ToString()); } var toolchainToUse = DisambiguousToolchainToUse[bitDepth]; if (null == toolchainToUse) { throw new Bam.Core.Exception($"{toolDescription} tool is undefined in {bitDepth.ToString()}-bit architectures"); } var toolTypeCollection = collection[bitDepth]; var toolTypeToInstantiate = toolTypeCollection.FirstOrDefault(item => (item.GetCustomAttributes(false)[0] as ToolRegistrationAttribute).ToolsetName.Equals(toolchainToUse, System.StringComparison.Ordinal)); if (null == toolTypeToInstantiate) { throw new Bam.Core.Exception( $"Unable to identify {toolDescription} tool in {bitDepth.ToString()}-bit architectures for toolchain {toolchainToUse}" ); } toolModule = Bam.Core.Graph.Instance.MakeModuleOfType<ToolType>(toolTypeToInstantiate); return toolModule; } /// <summary> /// Get a suitable C compiler /// </summary> /// <param name="bitDepth">Bit-depth required to compile for</param> /// <returns>The compiler</returns> public static CompilerTool C_Compiler( EBit bitDepth) => GetTool<CompilerTool>(C_Compilers, bitDepth, "C compiler", ref Default[bitDepth].c_compiler); /// <summary> /// Get a suitable C++ compiler /// </summary> /// <param name="bitDepth">Bit-depth required to compile for</param> /// <returns>The compiler</returns> public static CompilerTool Cxx_Compiler( EBit bitDepth) => GetTool<CompilerTool>(Cxx_Compilers, bitDepth, "C++ compiler", ref Default[bitDepth].cxx_compiler); /// <summary> /// Get a suitable librarian /// </summary> /// <param name="bitDepth">Bit-depth required to archive for</param> /// <returns>The librarian</returns> public static LibrarianTool Librarian( EBit bitDepth) => GetTool<LibrarianTool>(Archivers, bitDepth, "librarian", ref Default[bitDepth].librarian); /// <summary> /// Get a suitable C linker /// </summary> /// <param name="bitDepth">Bit-depth required to link for</param> /// <returns>The linker</returns> public static LinkerTool C_Linker( EBit bitDepth) => GetTool<LinkerTool>(C_Linkers, bitDepth, "C linker", ref Default[bitDepth].c_linker); /// <summary> /// Get a suitable C++ linker /// </summary> /// <param name="bitDepth">Bit-depth required to link for</param> /// <returns>The linker</returns> public static LinkerTool Cxx_Linker( EBit bitDepth) => GetTool<LinkerTool>(Cxx_Linkers, bitDepth, "C++ linker", ref Default[bitDepth].cxx_linker); /// <summary> /// Get a suitable Objective C compiler /// </summary> /// <param name="bitDepth">Bit-depth required to compile for</param> /// <returns>The compiler</returns> public static CompilerTool ObjectiveC_Compiler( EBit bitDepth) => GetTool<CompilerTool>(ObjectiveC_Compilers, bitDepth, "Objective C compiler", ref Default[bitDepth].objc_compiler); /// <summary> /// Get a suitable Objective C++ compiler /// </summary> /// <param name="bitDepth">Bit-depth required to compile for</param> /// <returns>The compiler</returns> public static CompilerTool ObjectiveCxx_Compiler( EBit bitDepth) => GetTool<CompilerTool>(ObjectiveCxx_Compilers, bitDepth, "Objective C++ compiler", ref Default[bitDepth].objcxx_compiler); /// <summary> /// Get a suitable Windows resource compiler /// </summary> /// <param name="bitDepth">Bit-depth required to compile for</param> /// <returns>The compiler</returns> public static WinResourceCompilerTool WinResource_Compiler( EBit 
bitDepth) => GetTool<WinResourceCompilerTool>(WinResourceCompilers, bitDepth, "Windows resource compiler", ref Default[bitDepth].winres_compiler); /// <summary> /// Get a suitable assembler /// </summary> /// <param name="bitDepth">Bit-depth required to assemble for</param> /// <returns>The assembler</returns> public static AssemblerTool Assembler( EBit bitDepth) => GetTool<AssemblerTool>(Assemblers, bitDepth, "Assembler", ref Default[bitDepth].assembler); /// <summary> /// Get a suitable preprocessor /// </summary> /// <param name="bitDepth">Bit-depth required to preprocess for</param> /// <returns>The preprocessor</returns> public static PreprocessorTool Preprocessor( EBit bitDepth) => GetTool<PreprocessorTool>(Preprocessors, bitDepth, "Preprocessor", ref Default[bitDepth].preprocessor); } }
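// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). It shows how a
// build script might ask the accessors above for the default tools of a given
// bit depth. The containing static class name ("C.DefaultToolchain") and the
// enum member ("C.EBit.SixtyFour") are assumptions for illustration only.
// ---------------------------------------------------------------------------
namespace ToolchainUsageExample
{
    internal static class Demo
    {
        internal static void ResolveTools()
        {
            // "C.DefaultToolchain" and "C.EBit.SixtyFour" are assumed identifiers for the
            // class and enum member exposing the accessors above; verify them against the
            // actual C package before relying on this sketch.
            var cCompiler = C.DefaultToolchain.C_Compiler(C.EBit.SixtyFour);
            var cxxCompiler = C.DefaultToolchain.Cxx_Compiler(C.EBit.SixtyFour);
            var linker = C.DefaultToolchain.C_Linker(C.EBit.SixtyFour);
            var librarian = C.DefaultToolchain.Librarian(C.EBit.SixtyFour);
            // With more than one toolset package installed, each call throws a
            // Bam.Core.Exception until the user disambiguates with the command-line
            // option named in the exception message.
        }
    }
}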
/* * [The "BSD license"] * Copyright (c) 2013 Terence Parr * Copyright (c) 2013 Sam Harwell * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.IO; using Antlr4.Runtime; using Antlr4.Runtime.Misc; using Antlr4.Runtime.Sharpen; namespace Antlr4.Runtime { /// <summary> /// Vacuum all input from a /// <see cref="System.IO.TextReader"/> /// / /// <see cref="System.IO.Stream"/> /// and then treat it /// like a /// <c>char[]</c> /// buffer. Can also pass in a /// <see cref="string"/> /// or /// <c>char[]</c> /// to use. 
/// <p>If you need encoding, pass in stream/reader with correct encoding.</p> /// </summary> public class AntlrInputStream : ICharStream { public const int ReadBufferSize = 1024; public const int InitialBufferSize = 1024; /// <summary>The data being scanned</summary> protected internal char[] data; /// <summary>How many characters are actually in the buffer</summary> protected internal int n; /// <summary>0..n-1 index into string of next char</summary> protected internal int p = 0; /// <summary>What is name or source of this char stream?</summary> public string name; public AntlrInputStream() { } /// <summary>Copy data in string to a local char array</summary> public AntlrInputStream(string input) { this.data = input.ToCharArray(); this.n = input.Length; } /// <summary>This is the preferred constructor for strings as no data is copied</summary> public AntlrInputStream(char[] data, int numberOfActualCharsInArray) { this.data = data; this.n = numberOfActualCharsInArray; } /// <exception cref="System.IO.IOException"/> public AntlrInputStream(TextReader r) : this(r, InitialBufferSize, ReadBufferSize) { } /// <exception cref="System.IO.IOException"/> public AntlrInputStream(TextReader r, int initialSize) : this(r, initialSize, ReadBufferSize) { } /// <exception cref="System.IO.IOException"/> public AntlrInputStream(TextReader r, int initialSize, int readChunkSize) { Load(r, initialSize, readChunkSize); } /// <exception cref="System.IO.IOException"/> public AntlrInputStream(Stream input) : this(new StreamReader(input), InitialBufferSize) { } /// <exception cref="System.IO.IOException"/> public AntlrInputStream(Stream input, int initialSize) : this(new StreamReader(input), initialSize) { } /// <exception cref="System.IO.IOException"/> public AntlrInputStream(Stream input, int initialSize, int readChunkSize) : this(new StreamReader(input), initialSize, readChunkSize) { } /// <exception cref="System.IO.IOException"/> public virtual void Load(TextReader r, int size, int readChunkSize) { if (r == null) { return; } data = r.ReadToEnd().ToCharArray(); n = data.Length; } /// <summary> /// Reset the stream so that it's in the same state it was /// when the object was created *except* the data array is not /// touched. /// </summary> /// <remarks> /// Reset the stream so that it's in the same state it was /// when the object was created *except* the data array is not /// touched. /// </remarks> public virtual void Reset() { p = 0; } public virtual void Consume() { if (p >= n) { System.Diagnostics.Debug.Assert(La(1) == IntStreamConstants.Eof); throw new InvalidOperationException("cannot consume EOF"); } //System.out.println("prev p="+p+", c="+(char)data[p]); if (p < n) { p++; } } //System.out.println("p moves to "+p+" (c='"+(char)data[p]+"')"); public virtual int La(int i) { if (i == 0) { return 0; } // undefined if (i < 0) { i++; // e.g., translate LA(-1) to use offset i=0; then data[p+0-1] if ((p + i - 1) < 0) { return IntStreamConstants.Eof; } } // invalid; no char before first char if ((p + i - 1) >= n) { //System.out.println("char LA("+i+")=EOF; p="+p); return IntStreamConstants.Eof; } //System.out.println("char LA("+i+")="+(char)data[p+i-1]+"; p="+p); //System.out.println("LA("+i+"); p="+p+" n="+n+" data.length="+data.length); return data[p + i - 1]; } public virtual int Lt(int i) { return La(i); } /// <summary> /// Return the current input symbol index 0..n where n indicates the /// last symbol has been read. 
/// </summary> /// <remarks> /// Return the current input symbol index 0..n where n indicates the /// last symbol has been read. The index is the index of char to /// be returned from LA(1). /// </remarks> public virtual int Index { get { return p; } } public virtual int Size { get { return n; } } /// <summary>mark/release do nothing; we have entire buffer</summary> public virtual int Mark() { return -1; } public virtual void Release(int marker) { } /// <summary> /// consume() ahead until p==index; can't just set p=index as we must /// update line and charPositionInLine. /// </summary> /// <remarks> /// consume() ahead until p==index; can't just set p=index as we must /// update line and charPositionInLine. If we seek backwards, just set p /// </remarks> public virtual void Seek(int index) { if (index <= p) { p = index; // just jump; don't update stream state (line, ...) return; } // seek forward, consume until p hits index or n (whichever comes first) index = Math.Min(index, n); while (p < index) { Consume(); } } public virtual string GetText(Interval interval) { int start = interval.a; int stop = interval.b; if (stop >= n) { stop = n - 1; } int count = stop - start + 1; if (start >= n) { return string.Empty; } // System.err.println("data: "+Arrays.toString(data)+", n="+n+ // ", start="+start+ // ", stop="+stop); return new string(data, start, count); } public virtual string SourceName { get { if (string.IsNullOrEmpty(name)) { return IntStreamConstants.UnknownSourceName; } return name; } } public override string ToString() { return new string(data); } } }
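// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): walking an
// AntlrInputStream by hand with the La/Consume/Index members defined above.
// In practice the stream is handed to a generated lexer; the lexer type named
// in the trailing comment is hypothetical.
// ---------------------------------------------------------------------------
using System.Text;
using Antlr4.Runtime;

namespace AntlrInputStreamUsageExample
{
    internal static class Demo
    {
        internal static string ReadAll(string text)
        {
            var input = new AntlrInputStream(text);
            var sb = new StringBuilder();
            // La(1) looks at the next character without consuming it; Consume() advances p.
            while (input.La(1) != IntStreamConstants.Eof)
            {
                sb.Append((char)input.La(1));
                input.Consume();
            }
            // Here input.Index == input.Size; Reset() would rewind to the start.
            // Typical use instead: var lexer = new MyGrammarLexer(input);  // hypothetical generated lexer
            return sb.ToString();
        }
    }
}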
/* * Copyright 2013 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System.Globalization; using System.Text; namespace ZXing.PDF417.Internal { /// <summary> /// /// </summary> /// <author>Guenther Grau</author> public class DetectionResult { private const int ADJUST_ROW_NUMBER_SKIP = 2; public BarcodeMetadata Metadata { get; private set; } public DetectionResultColumn[] DetectionResultColumns { get; set; } public BoundingBox Box { get; set; } public int ColumnCount { get; private set; } public int RowCount { get { return Metadata.RowCount; } } public int ErrorCorrectionLevel { get { return Metadata.ErrorCorrectionLevel; } } public DetectionResult(BarcodeMetadata metadata, BoundingBox box) { Metadata = metadata; Box = box; ColumnCount = metadata.ColumnCount; DetectionResultColumns = new DetectionResultColumn[ColumnCount + 2]; } /// <summary> /// Returns the DetectionResult Columns. This does a fair bit of calculation, so call it sparingly. /// </summary> /// <returns>The detection result columns.</returns> public DetectionResultColumn[] getDetectionResultColumns() { adjustIndicatorColumnRowNumbers(DetectionResultColumns[0]); adjustIndicatorColumnRowNumbers(DetectionResultColumns[ColumnCount + 1]); int unadjustedCodewordCount = PDF417Common.MAX_CODEWORDS_IN_BARCODE; int previousUnadjustedCount; do { previousUnadjustedCount = unadjustedCodewordCount; unadjustedCodewordCount = adjustRowNumbers(); } while (unadjustedCodewordCount > 0 && unadjustedCodewordCount < previousUnadjustedCount); return DetectionResultColumns; } /// <summary> /// Adjusts the indicator column row numbers. /// </summary> /// <param name="detectionResultColumn">Detection result column.</param> private void adjustIndicatorColumnRowNumbers(DetectionResultColumn detectionResultColumn) { if (detectionResultColumn != null) { ((DetectionResultRowIndicatorColumn) detectionResultColumn) .adjustCompleteIndicatorColumnRowNumbers(Metadata); } } /// <summary> /// return number of codewords which don't have a valid row number. Note that the count is not accurate as codewords . /// will be counted several times. 
It just serves as an indicator to see when we can stop adjusting row numbers /// </summary> /// <returns>The row numbers.</returns> private int adjustRowNumbers() { // TODO ensure that no detected codewords with unknown row number are left // we should be able to estimate the row height and use it as a hint for the row number // we should also fill the rows top to bottom and bottom to top int unadjustedCount = adjustRowNumbersByRow(); if (unadjustedCount == 0) { return 0; } for (int barcodeColumn = 1; barcodeColumn < ColumnCount + 1; barcodeColumn++) { Codeword[] codewords = DetectionResultColumns[barcodeColumn].Codewords; for (int codewordsRow = 0; codewordsRow < codewords.Length; codewordsRow++) { if (codewords[codewordsRow] == null) { continue; } if (!codewords[codewordsRow].HasValidRowNumber) { adjustRowNumbers(barcodeColumn, codewordsRow, codewords); } } } return unadjustedCount; } /// <summary> /// Adjusts the row numbers by row. /// </summary> /// <returns>The row numbers by row.</returns> private int adjustRowNumbersByRow() { adjustRowNumbersFromBothRI(); // RI = RowIndicators // TODO we should only do full row adjustments if row numbers of left and right row indicator column match. // Maybe it's even better to calculated the height (in codeword rows) and divide it by the number of barcode // rows. This, together with the LRI and RRI row numbers should allow us to get a good estimate where a row // number starts and ends. int unadjustedCount = adjustRowNumbersFromLRI(); return unadjustedCount + adjustRowNumbersFromRRI(); } /// <summary> /// Adjusts the row numbers from both Row Indicators /// </summary> /// <returns> zero </returns> private void adjustRowNumbersFromBothRI() { if (DetectionResultColumns[0] == null || DetectionResultColumns[ColumnCount + 1] == null) { return; } Codeword[] LRIcodewords = DetectionResultColumns[0].Codewords; Codeword[] RRIcodewords = DetectionResultColumns[ColumnCount + 1].Codewords; for (int codewordsRow = 0; codewordsRow < LRIcodewords.Length; codewordsRow++) { if (LRIcodewords[codewordsRow] != null && RRIcodewords[codewordsRow] != null && LRIcodewords[codewordsRow].RowNumber == RRIcodewords[codewordsRow].RowNumber) { for (int barcodeColumn = 1; barcodeColumn <= ColumnCount; barcodeColumn++) { Codeword codeword = DetectionResultColumns[barcodeColumn].Codewords[codewordsRow]; if (codeword == null) { continue; } codeword.RowNumber = LRIcodewords[codewordsRow].RowNumber; if (!codeword.HasValidRowNumber) { // LOG.info("Removing codeword with invalid row number, cw[" + codewordsRow + "][" + barcodeColumn + "]"); DetectionResultColumns[barcodeColumn].Codewords[codewordsRow] = null; } } } } } /// <summary> /// Adjusts the row numbers from Right Row Indicator. 
/// </summary> /// <returns>The unadjusted row count.</returns> private int adjustRowNumbersFromRRI() { if (DetectionResultColumns[ColumnCount + 1] == null) { return 0; } int unadjustedCount = 0; Codeword[] codewords = DetectionResultColumns[ColumnCount + 1].Codewords; for (int codewordsRow = 0; codewordsRow < codewords.Length; codewordsRow++) { if (codewords[codewordsRow] == null) { continue; } int rowIndicatorRowNumber = codewords[codewordsRow].RowNumber; int invalidRowCounts = 0; for (int barcodeColumn = ColumnCount + 1; barcodeColumn > 0 && invalidRowCounts < ADJUST_ROW_NUMBER_SKIP; barcodeColumn--) { Codeword codeword = DetectionResultColumns[barcodeColumn].Codewords[codewordsRow]; if (codeword != null) { invalidRowCounts = adjustRowNumberIfValid(rowIndicatorRowNumber, invalidRowCounts, codeword); if (!codeword.HasValidRowNumber) { unadjustedCount++; } } } } return unadjustedCount; } /// <summary> /// Adjusts the row numbers from Left Row Indicator. /// </summary> /// <returns> Unadjusted row Count.</returns> private int adjustRowNumbersFromLRI() { if (DetectionResultColumns[0] == null) { return 0; } int unadjustedCount = 0; Codeword[] codewords = DetectionResultColumns[0].Codewords; for (int codewordsRow = 0; codewordsRow < codewords.Length; codewordsRow++) { if (codewords[codewordsRow] == null) { continue; } int rowIndicatorRowNumber = codewords[codewordsRow].RowNumber; int invalidRowCounts = 0; for (int barcodeColumn = 1; barcodeColumn < ColumnCount + 1 && invalidRowCounts < ADJUST_ROW_NUMBER_SKIP; barcodeColumn++) { Codeword codeword = DetectionResultColumns[barcodeColumn].Codewords[codewordsRow]; if (codeword != null) { invalidRowCounts = adjustRowNumberIfValid(rowIndicatorRowNumber, invalidRowCounts, codeword); if (!codeword.HasValidRowNumber) { unadjustedCount++; } } } } return unadjustedCount; } /// <summary> /// Adjusts the row number if valid. /// </summary> /// <returns>The invalid rows</returns> /// <param name="rowIndicatorRowNumber">Row indicator row number.</param> /// <param name="invalidRowCounts">Invalid row counts.</param> /// <param name="codeword">Codeword.</param> private static int adjustRowNumberIfValid(int rowIndicatorRowNumber, int invalidRowCounts, Codeword codeword) { if (codeword == null) { return invalidRowCounts; } if (!codeword.HasValidRowNumber) { if (codeword.IsValidRowNumber(rowIndicatorRowNumber)) { codeword.RowNumber = rowIndicatorRowNumber; invalidRowCounts = 0; } else { ++invalidRowCounts; } } return invalidRowCounts; } /// <summary> /// Adjusts the row numbers. 
/// </summary> /// <param name="barcodeColumn">Barcode column.</param> /// <param name="codewordsRow">Codewords row.</param> /// <param name="codewords">Codewords.</param> private void adjustRowNumbers(int barcodeColumn, int codewordsRow, Codeword[] codewords) { Codeword codeword = codewords[codewordsRow]; Codeword[] previousColumnCodewords = DetectionResultColumns[barcodeColumn - 1].Codewords; Codeword[] nextColumnCodewords = previousColumnCodewords; if (DetectionResultColumns[barcodeColumn + 1] != null) { nextColumnCodewords = DetectionResultColumns[barcodeColumn + 1].Codewords; } Codeword[] otherCodewords = new Codeword[14]; otherCodewords[2] = previousColumnCodewords[codewordsRow]; otherCodewords[3] = nextColumnCodewords[codewordsRow]; if (codewordsRow > 0) { otherCodewords[0] = codewords[codewordsRow - 1]; otherCodewords[4] = previousColumnCodewords[codewordsRow - 1]; otherCodewords[5] = nextColumnCodewords[codewordsRow - 1]; } if (codewordsRow > 1) { otherCodewords[8] = codewords[codewordsRow - 2]; otherCodewords[10] = previousColumnCodewords[codewordsRow - 2]; otherCodewords[11] = nextColumnCodewords[codewordsRow - 2]; } if (codewordsRow < codewords.Length - 1) { otherCodewords[1] = codewords[codewordsRow + 1]; otherCodewords[6] = previousColumnCodewords[codewordsRow + 1]; otherCodewords[7] = nextColumnCodewords[codewordsRow + 1]; } if (codewordsRow < codewords.Length - 2) { otherCodewords[9] = codewords[codewordsRow + 2]; otherCodewords[12] = previousColumnCodewords[codewordsRow + 2]; otherCodewords[13] = nextColumnCodewords[codewordsRow + 2]; } foreach (Codeword otherCodeword in otherCodewords) { if (adjustRowNumber(codeword, otherCodeword)) { return; } } } /// <summary> /// Adjusts the row number. /// </summary> /// <returns><c>true</c>, if row number was adjusted, <c>false</c> otherwise.</returns> /// <param name="codeword">Codeword.</param> /// <param name="otherCodeword">Other codeword.</param> private static bool adjustRowNumber(Codeword codeword, Codeword otherCodeword) { if (otherCodeword == null) { return false; } if (otherCodeword.HasValidRowNumber && otherCodeword.Bucket == codeword.Bucket) { codeword.RowNumber = otherCodeword.RowNumber; return true; } return false; } /// <summary> /// Returns a <see cref="System.String"/> that represents the current <see cref="ZXing.PDF417.Internal.DetectionResult"/>. /// </summary> /// <returns>A <see cref="System.String"/> that represents the current <see cref="ZXing.PDF417.Internal.DetectionResult"/>.</returns> public override string ToString() { StringBuilder formatter = new StringBuilder(); DetectionResultColumn rowIndicatorColumn = DetectionResultColumns[0]; if (rowIndicatorColumn == null) { rowIndicatorColumn = DetectionResultColumns[ColumnCount + 1]; } for (int codewordsRow = 0; codewordsRow < rowIndicatorColumn.Codewords.Length; codewordsRow++) { formatter.AppendFormat(CultureInfo.InvariantCulture, "CW {0,3}:", codewordsRow); for (int barcodeColumn = 0; barcodeColumn < ColumnCount + 2; barcodeColumn++) { if (DetectionResultColumns[barcodeColumn] == null) { formatter.Append(" | "); continue; } Codeword codeword = DetectionResultColumns[barcodeColumn].Codewords[codewordsRow]; if (codeword == null) { formatter.Append(" | "); continue; } formatter.AppendFormat(CultureInfo.InvariantCulture, " {0,3}|{1,3}", codeword.RowNumber, codeword.Value); } formatter.Append("\n"); } return formatter.ToString(); } } }
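// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source). DetectionResult is an
// internal piece of the PDF417 decoder: the decoder fills DetectionResultColumns
// (index 0 and ColumnCount + 1 hold the row indicator columns) and then calls
// getDetectionResultColumns(), which keeps adjusting row numbers until the count
// of codewords without a valid row number stops decreasing. The metadata, box
// and columns passed in below are placeholders produced earlier in the pipeline.
// ---------------------------------------------------------------------------
using System;
using ZXing.PDF417.Internal;

namespace DetectionResultUsageExample
{
    internal static class Demo
    {
        internal static DetectionResultColumn[] AdjustColumns(
            BarcodeMetadata metadata, BoundingBox box, DetectionResultColumn[] columns)
        {
            var detectionResult = new DetectionResult(metadata, box);
            int count = Math.Min(columns.Length, detectionResult.DetectionResultColumns.Length);
            for (int i = 0; i < count; i++)
            {
                detectionResult.DetectionResultColumns[i] = columns[i];
            }
            return detectionResult.getDetectionResultColumns();
        }
    }
}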
using System; using System.ComponentModel; using System.Windows.Threading; using System.Windows; using System.Windows.Automation.Peers; using System.Windows.Controls.Primitives; using System.Windows.Input; using System.Windows.Media; using System.Windows.Shapes; using MS.Utility; using MS.Internal.KnownBoxes; namespace System.Windows.Controls { /// <summary> /// Represents the standard button component that inherently reacts to the Click event. /// The Button control is one of the most basic forms of user interface (UI). /// </summary> public class Button: ButtonBase { #region Constructors static Button() { DefaultStyleKeyProperty.OverrideMetadata(typeof(Button), new FrameworkPropertyMetadata(typeof(Button))); _dType = DependencyObjectType.FromSystemTypeInternal(typeof(Button)); // WORKAROUND: the following if statement is a workaround to get the ButtonBase cctor to run before we // override metadata. if (ButtonBase.CommandProperty != null) { IsEnabledProperty.OverrideMetadata(typeof(Button), new FrameworkPropertyMetadata(new PropertyChangedCallback(OnIsEnabledChanged))); } } /// <summary> /// Default Button constructor /// </summary> /// <remarks> /// Automatic determination of current Dispatcher. Use alternative constructor /// that accepts a Dispatcher for best performance. /// </remarks> public Button() : base() { } #endregion #region Properties #region IsDefault /// <summary> /// The DependencyProperty for the IsDefault property. /// Flags: None /// Default Value: false /// </summary> public static readonly DependencyProperty IsDefaultProperty = DependencyProperty.Register("IsDefault", typeof(bool), typeof(Button), new FrameworkPropertyMetadata(BooleanBoxes.FalseBox, new PropertyChangedCallback(OnIsDefaultChanged))); /// <summary> /// Specifies whether or not this button is the default button. /// </summary> /// <value></value> public bool IsDefault { get { return (bool) GetValue(IsDefaultProperty); } set { SetValue(IsDefaultProperty, BooleanBoxes.Box(value)); } } private static void OnIsDefaultChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { Button b = d as Button; KeyboardFocusChangedEventHandler focusChangedEventHandler = FocusChangedEventHandlerField.GetValue(b); if (focusChangedEventHandler == null) { focusChangedEventHandler = new KeyboardFocusChangedEventHandler(b.OnFocusChanged); FocusChangedEventHandlerField.SetValue(b, focusChangedEventHandler); } if ((bool) e.NewValue) { AccessKeyManager.Register("\x000D", b); KeyboardNavigation.Current.FocusChanged += focusChangedEventHandler; b.UpdateIsDefaulted(Keyboard.FocusedElement); } else { AccessKeyManager.Unregister("\x000D", b); KeyboardNavigation.Current.FocusChanged -= focusChangedEventHandler; b.UpdateIsDefaulted(null); } } private static void OnIsEnabledChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { // This value is cached in FE, so all we have to do here is look at the new value Button b = ((Button)d); // If it's not a default button we don't need to update the IsDefaulted property if (b.IsDefault) { b.UpdateIsDefaulted(Keyboard.FocusedElement); } } #endregion #region IsCancel /// <summary> /// The DependencyProperty for the IsCancel property. 
/// Flags: None /// Default Value: false /// </summary> public static readonly DependencyProperty IsCancelProperty = DependencyProperty.Register( "IsCancel", typeof(bool), typeof(Button), new FrameworkPropertyMetadata( BooleanBoxes.FalseBox, new PropertyChangedCallback(OnIsCancelChanged))); /// <summary> /// Specifies whether or not this button is the cancel button. /// </summary> /// <value></value> public bool IsCancel { get { return (bool) GetValue(IsCancelProperty); } set { SetValue(IsCancelProperty, BooleanBoxes.Box(value)); } } private static void OnIsCancelChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { Button b = d as Button; if ((bool) e.NewValue) { AccessKeyManager.Register("\x001B", b); } else { AccessKeyManager.Unregister("\x001B", b); } } #endregion #region IsDefaulted /// <summary> /// The key needed set a read-only property. /// </summary> private static readonly DependencyPropertyKey IsDefaultedPropertyKey = DependencyProperty.RegisterReadOnly("IsDefaulted", typeof(bool), typeof(Button), new FrameworkPropertyMetadata(BooleanBoxes.FalseBox)); /// <summary> /// The DependencyProperty for the IsDefaulted property. /// Flags: None /// Default Value: false /// </summary> public static readonly DependencyProperty IsDefaultedProperty = IsDefaultedPropertyKey.DependencyProperty; /// <summary> /// Specifies whether or not this button is the button that would be invoked when Enter is pressed. /// </summary> /// <value></value> public bool IsDefaulted { get { return (bool)GetValue(IsDefaultedProperty); } } #endregion #endregion #region Private helpers private void OnFocusChanged(object sender, KeyboardFocusChangedEventArgs e) { UpdateIsDefaulted(Keyboard.FocusedElement); } private void UpdateIsDefaulted(IInputElement focus) { // If it's not a default button, or nothing is focused, or it's disabled then it's not defaulted. if (!IsDefault || focus == null || !IsEnabled) { SetValue(IsDefaultedPropertyKey, BooleanBoxes.FalseBox); return; } DependencyObject focusDO = focus as DependencyObject; object thisScope, focusScope; // If the focused thing is not in this scope then IsDefaulted = false AccessKeyPressedEventArgs e; object isDefaulted = BooleanBoxes.FalseBox; try { // Step 1: Determine the AccessKey scope from currently focused element e = new AccessKeyPressedEventArgs(); focus.RaiseEvent(e); focusScope = e.Scope; // Step 2: Determine the AccessKey scope from this button e = new AccessKeyPressedEventArgs(); this.RaiseEvent(e); thisScope = e.Scope; // Step 3: Compare scopes if (thisScope == focusScope && (focusDO == null || (bool)focusDO.GetValue(KeyboardNavigation.AcceptsReturnProperty) == false)) { isDefaulted = BooleanBoxes.TrueBox; } } finally { SetValue(IsDefaultedPropertyKey, isDefaulted); } } #endregion Private helpers #region Override methods /// <summary> /// Creates AutomationPeer (<see cref="UIElement.OnCreateAutomationPeer"/>) /// </summary> protected override System.Windows.Automation.Peers.AutomationPeer OnCreateAutomationPeer() { return new System.Windows.Automation.Peers.ButtonAutomationPeer(this); } /// <summary> /// This method is called when button is clicked. /// </summary> protected override void OnClick() { if (AutomationPeer.ListenerExists(AutomationEvents.InvokePatternOnInvoked)) { AutomationPeer peer = UIElementAutomationPeer.CreatePeerForElement(this); if (peer != null) peer.RaiseAutomationEvent(AutomationEvents.InvokePatternOnInvoked); } // base.OnClick should be called first. 
            // Our default command for the Cancel button to close the dialog should happen
            // after the Button's click event handler has been called.
            // If there is an exception, and it's a Cancel button and RoutedCommand is null,
            // we will raise Window.DialogCancelCommand.
            try
            {
                base.OnClick();
            }
            finally
            {
                // When the Button RoutedCommand is null, if it's a Cancel Button, Window.DialogCancelCommand will
                // be the default command. Do not assign Window.DialogCancelCommand to Button.Command.
                // If in the Button click handler the user nulls the Command, we still want to provide the default behavior.
                if ((Command == null) && IsCancel)
                {
                    // Can't invoke Window.DialogCancelCommand directly. Have to raise event.
                    // Filed bug 936090: Commanding perf issue: can't directly invoke a command.
                    MS.Internal.Commands.CommandHelpers.ExecuteCommand(Window.DialogCancelCommand, null, this);
                }
            }
        }

        //
        // This property
        // 1. Finds the correct initial size for the _effectiveValues store on the current DependencyObject
        // 2. This is a performance optimization
        //
        internal override int EffectiveValuesInitialSize
        {
            get { return 42; }
        }

        #endregion

        #region Data

        // This field is used to hang on to the event handler that we
        // hand out to KeyboardNavigation. On the KeyNav side it's tracked
        // as a WeakReference, so when we hand it out we need to make sure
        // that we hold a strong reference ourselves. We only need this
        // handler when we are a Default button (very uncommon).
        private static readonly UncommonField<KeyboardFocusChangedEventHandler> FocusChangedEventHandlerField = new UncommonField<KeyboardFocusChangedEventHandler>();

        #endregion

        #region DTypeThemeStyleKey

        // Returns the DependencyObjectType for the registered ThemeStyleKey's default
        // value. Controls will override this method to return appropriate types.
        internal override DependencyObjectType DTypeThemeStyleKey
        {
            get { return _dType; }
        }

        private static DependencyObjectType _dType;

        #endregion DTypeThemeStyleKey
    }
}
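// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): wiring IsDefault and
// IsCancel as described above. Enter invokes the default button through the
// registered "\x000D" access key (while IsDefaulted is true), Escape invokes the
// cancel button ("\x001B"), and a cancel button whose Command is null falls back
// to Window.DialogCancelCommand when clicked.
// ---------------------------------------------------------------------------
using System.Windows;
using System.Windows.Controls;

namespace ButtonUsageExample
{
    internal static class Demo
    {
        internal static bool? ShowConfirmation(Window owner)
        {
            var ok = new Button { Content = "OK", IsDefault = true };
            var cancel = new Button { Content = "Cancel", IsCancel = true };

            var dialog = new Window
            {
                Owner = owner,
                Title = "Confirm",
                SizeToContent = SizeToContent.WidthAndHeight,
                Content = new StackPanel { Children = { ok, cancel } }
            };
            ok.Click += (sender, args) => dialog.DialogResult = true;

            return dialog.ShowDialog();
        }
    }
}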
using System; using System.Linq; using System.Linq.Expressions; using PlainElastic.Net.Utils; namespace PlainElastic.Net { /// <summary> /// Builds a command that allows to execute a search query and get back search hits that match the query. /// http://www.elasticsearch.org/guide/reference/api/search/uri-request.html /// </summary> public class SearchCommand : CommandBuilder<SearchCommand> { public string Index { get; private set; } public string Type { get; private set; } public SearchCommand(string index = null, string type = null) { Index = index; Type = type; } public SearchCommand(string[] indexes, string[] types) { Index = indexes.JoinWithComma(); Type = types.JoinWithComma(); } #region Query Parameters /// <summary> /// The analyzer name to be used when analyzing the query string. /// </summary> public SearchCommand Analyzer(string analyzer) { WithParameter("analyzer", analyzer); return this; } /// <summary> /// The analyzer name to be used when analyzing the query string. /// </summary> public SearchCommand Analyzer(DefaultAnalyzers analyzer) { WithParameter("analyzer", analyzer.AsString()); return this; } /// <summary> /// Should wildcard and prefix queries be analyzed or not. Defaults to false. /// </summary> public SearchCommand AnalyzeWildcard(bool analyzeWildcard = false) { WithParameter("analyze_wildcard", analyzeWildcard.AsString()); return this; } /// <summary> /// The default field to use when no field prefix is defined within the query. /// </summary> public SearchCommand Df(string defaultField) { WithParameter("df", defaultField); return this; } /// <summary> /// The default operator to be used, can be AND or OR. Defaults to OR. /// </summary> public SearchCommand DefaultOperator(Operator defaultOperator = Operator.OR) { WithParameter("default_operator", defaultOperator.AsString()); return this; } /// <summary> /// Includes explanation of how scoring of the hits was computed for each hit. /// </summary> public SearchCommand Explain() { WithParameter("explain", "true"); return this; } /// <summary> /// The selective fields of the document to return for each hit. /// </summary> public SearchCommand Fields(string fields) { WithParameter("fields", fields); return this; } /// <summary> /// The selective fields of the document to return for each hit. /// </summary> public SearchCommand Fields<T>(params Expression<Func<T, object>>[] properties) { string fields = properties.Select(prop => prop.GetPropertyPath()).JoinWithComma(); WithParameter("fields", fields); return this; } /// <summary> /// The starting from index of the hits to return. Defaults to 0. /// </summary> public SearchCommand From(int fromIndex = 0) { WithParameter("from", fromIndex.AsString()); return this; } /// <summary> /// Determines whether terms should be automatically lowercased or not. Defaults to true. /// </summary> public SearchCommand LowercaseExpandedTerms(bool lowercaseExpandedTerms = true) { WithParameter("lowercase_expanded_terms", lowercaseExpandedTerms.AsString()); return this; } /// <summary> /// The query string (maps to the query_string query). /// </summary> public SearchCommand Q(string query) { WithParameter("q", query); return this; } /// <summary> /// A comma separated list of the routing values to control which shards the count request will be executed on. 
/// </summary> public SearchCommand Routing(string routing) { WithParameter("routing", routing); return this; } /// <summary> /// The scroll parameter is a time value parameter (for example: scroll=5m), /// indicating for how long the nodes that participate in the search will maintain relevant resources in order to continue and support it. /// see http://www.elasticsearch.org/guide/reference/api/search/scroll.html /// </summary> public SearchCommand Scroll(string scrollActiveTime) { WithParameter("scroll", scrollActiveTime); return this; } /// <summary> /// The type of the search operation to perform. /// Defaults to query_then_fetch. /// see http://www.elasticsearch.org/guide/reference/api/search/search-type.html /// </summary> public SearchCommand SearchType(SearchType searchType) { WithParameter("search_type", searchType.AsString()); return this; } /// <summary> /// The number of hits to return. Defaults to 10. /// </summary> public SearchCommand Size(int size = 10) { WithParameter("size", size.AsString()); return this; } /// <summary> /// Sorting to perform. There can be several Sort parameters (order is important). /// Use "_score" to sort by query score. /// </summary> public SearchCommand Sort(string fieldname, SortDirection direction = SortDirection.@default) { string value = fieldname; if (direction != SortDirection.@default) value += ":" + direction.AsString(); WithParameter("sort", value); return this; } /// <summary> /// Sorting to perform. There can be several Sort parameters (order is important). /// </summary> public SearchCommand Sort<T>(Expression<Func<T, object>> property, SortDirection direction = SortDirection.@default) { string fieldname = property.GetPropertyPath(); return Sort(fieldname, direction); } public SearchCommand Timeout(string timeout) { WithParameter("timeout", timeout); return this; } public SearchCommand TrackScores(bool trackScores) { WithParameter("track_scores", trackScores.AsString()); return this; } #endregion protected override string BuildUrlPath() { return UrlBuilder.BuildUrlPath(Index, Type, "_search"); } } }
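// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): composing the URI
// parameters defined above into a single search command. SortDirection.desc and
// Operator.AND are assumed to exist alongside the members referenced in the code;
// the finished command is then handed to the library's HTTP connection object
// together with a JSON query body.
// ---------------------------------------------------------------------------
using PlainElastic.Net;

namespace SearchCommandUsageExample
{
    internal static class Demo
    {
        internal static SearchCommand BuildTweetSearch()
        {
            return new SearchCommand("tweets", "tweet")
                .Q("user:kimchy")                      // maps to the query_string query
                .DefaultOperator(Operator.AND)         // assumed enum member
                .From(0)
                .Size(25)
                .Sort("post_date", SortDirection.desc) // assumed enum member
                .TrackScores(true);
        }
    }
}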
// Copyright (c) Alex Ghiondea. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using SmugMug.v2.Authentication; namespace SmugMug.v2.Types { public partial class ImageSizeDetailsEntity : SmugMugEntity { private string _imageSizeLarge; private string _imageSizeMedium; private string _imageSizeOriginal; private string _imageSizeSmall; private string _imageSizeThumb; private string _imageSizeTiny; private string _imageSizeX2Large; private string _imageSizeX3Large; private string _imageSizeXLarge; private string _imageUrlTemplate; private string _usableSizes; private string _videoSize110; private string _videoSize1280; private string _videoSize1920; private string _videoSize200; private string _videoSize320; private string _videoSize640; private string _videoSize960; private string _videoSMIL; public string ImageSizeLarge { get { return _imageSizeLarge; } set { if (_imageSizeLarge != value) { NotifyPropertyValueChanged("ImageSizeLarge", oldValue:_imageSizeLarge, newValue: value); _imageSizeLarge = value; } } } public string ImageSizeMedium { get { return _imageSizeMedium; } set { if (_imageSizeMedium != value) { NotifyPropertyValueChanged("ImageSizeMedium", oldValue:_imageSizeMedium, newValue: value); _imageSizeMedium = value; } } } public string ImageSizeOriginal { get { return _imageSizeOriginal; } set { if (_imageSizeOriginal != value) { NotifyPropertyValueChanged("ImageSizeOriginal", oldValue:_imageSizeOriginal, newValue: value); _imageSizeOriginal = value; } } } public string ImageSizeSmall { get { return _imageSizeSmall; } set { if (_imageSizeSmall != value) { NotifyPropertyValueChanged("ImageSizeSmall", oldValue:_imageSizeSmall, newValue: value); _imageSizeSmall = value; } } } public string ImageSizeThumb { get { return _imageSizeThumb; } set { if (_imageSizeThumb != value) { NotifyPropertyValueChanged("ImageSizeThumb", oldValue:_imageSizeThumb, newValue: value); _imageSizeThumb = value; } } } public string ImageSizeTiny { get { return _imageSizeTiny; } set { if (_imageSizeTiny != value) { NotifyPropertyValueChanged("ImageSizeTiny", oldValue:_imageSizeTiny, newValue: value); _imageSizeTiny = value; } } } public string ImageSizeX2Large { get { return _imageSizeX2Large; } set { if (_imageSizeX2Large != value) { NotifyPropertyValueChanged("ImageSizeX2Large", oldValue:_imageSizeX2Large, newValue: value); _imageSizeX2Large = value; } } } public string ImageSizeX3Large { get { return _imageSizeX3Large; } set { if (_imageSizeX3Large != value) { NotifyPropertyValueChanged("ImageSizeX3Large", oldValue:_imageSizeX3Large, newValue: value); _imageSizeX3Large = value; } } } public string ImageSizeXLarge { get { return _imageSizeXLarge; } set { if (_imageSizeXLarge != value) { NotifyPropertyValueChanged("ImageSizeXLarge", oldValue:_imageSizeXLarge, newValue: value); _imageSizeXLarge = value; } } } public string ImageUrlTemplate { get { return _imageUrlTemplate; } set { if (_imageUrlTemplate != value) { NotifyPropertyValueChanged("ImageUrlTemplate", oldValue:_imageUrlTemplate, newValue: value); _imageUrlTemplate = value; } } } public string UsableSizes { get { return _usableSizes; } set { if (_usableSizes != value) { NotifyPropertyValueChanged("UsableSizes", oldValue:_usableSizes, newValue: value); _usableSizes = value; } } } public string VideoSize110 { get { return _videoSize110; } set { if (_videoSize110 != value) { NotifyPropertyValueChanged("VideoSize110", oldValue:_videoSize110, newValue: value); 
_videoSize110 = value; } } } public string VideoSize1280 { get { return _videoSize1280; } set { if (_videoSize1280 != value) { NotifyPropertyValueChanged("VideoSize1280", oldValue:_videoSize1280, newValue: value); _videoSize1280 = value; } } } public string VideoSize1920 { get { return _videoSize1920; } set { if (_videoSize1920 != value) { NotifyPropertyValueChanged("VideoSize1920", oldValue:_videoSize1920, newValue: value); _videoSize1920 = value; } } } public string VideoSize200 { get { return _videoSize200; } set { if (_videoSize200 != value) { NotifyPropertyValueChanged("VideoSize200", oldValue:_videoSize200, newValue: value); _videoSize200 = value; } } } public string VideoSize320 { get { return _videoSize320; } set { if (_videoSize320 != value) { NotifyPropertyValueChanged("VideoSize320", oldValue:_videoSize320, newValue: value); _videoSize320 = value; } } } public string VideoSize640 { get { return _videoSize640; } set { if (_videoSize640 != value) { NotifyPropertyValueChanged("VideoSize640", oldValue:_videoSize640, newValue: value); _videoSize640 = value; } } } public string VideoSize960 { get { return _videoSize960; } set { if (_videoSize960 != value) { NotifyPropertyValueChanged("VideoSize960", oldValue:_videoSize960, newValue: value); _videoSize960 = value; } } } public string VideoSMIL { get { return _videoSMIL; } set { if (_videoSMIL != value) { NotifyPropertyValueChanged("VideoSMIL", oldValue:_videoSMIL, newValue: value); _videoSMIL = value; } } } } }
//Copyright (c) Microsoft Corporation. All rights reserved.

using System;
using System.Collections.Generic;
using System.Text;
using System.ServiceModel;
using System.ServiceModel.Description;
using System.ServiceModel.Channels;
using System.Globalization;
using System.Xml;
using System.IO;
using WsdlNS = System.Web.Services.Description;
using System.Data.SqlClient;

namespace Microsoft.Samples.SsbTransportChannel
{
    public sealed class SsbBindingElement : TransportBindingElement, IPolicyExportExtension, IWsdlExportExtension
    {
        string sqlConnectionString;
        Guid? conversationGroupId = null;
        bool senderEndsConversationOnClose = false;
        string contract = "DEFAULT";
        bool useActionForSsbMessageType = false;
        bool useEncryption = false;
        static XmlDocument xmlDocument;

        public SsbBindingElement() : this(null, null)
        { }

        public SsbBindingElement(string sqlConnectionString) : this(sqlConnectionString, null)
        { }

        public SsbBindingElement(string sqlConnectionString, Guid conversationGroupId) : this(sqlConnectionString, (Guid?)conversationGroupId)
        { }

        SsbBindingElement(string sqlConnectionString, Guid? conversationGroupId) : base()
        {
            this.sqlConnectionString = sqlConnectionString;
            this.conversationGroupId = conversationGroupId;
            ValidateConnectionString(sqlConnectionString);
        }

        public SsbBindingElement(SsbBindingElement other) : base(other)
        {
            this.SqlConnectionString = other.SqlConnectionString;
            this.SenderEndsConversationOnClose = other.SenderEndsConversationOnClose;
            this.contract = other.contract;
            this.useEncryption = other.useEncryption;
            this.useActionForSsbMessageType = other.useActionForSsbMessageType;
            if (other.IsConversationGroupSpecified)
            {
                this.ConversationGroupId = other.ConversationGroupId;
            }
        }

        /// <summary>
        /// Cache a list of valid connection strings. There will typically
        /// be only one, or perhaps two, different connection strings for an AppDomain,
        /// so cache them to avoid the repetitive, expensive parsing using SqlConnectionStringBuilder.
        /// </summary>
        static SortedList<string, string> validConnectionStrings = new SortedList<string, string>();

        /// <summary>
        /// Validate that the connection string meets the requirements to be used by the channel.
        /// In particular Asynchronous Processing and MultipleActiveResultSets are required.
        /// </summary>
        /// <param name="constr"></param>
        void ValidateConnectionString(string constr)
        {
            if (constr == null)
            {
                return;
            }
            if (validConnectionStrings.ContainsKey(constr))
            {
                return;
            }
            SqlConnectionStringBuilder sb = new SqlConnectionStringBuilder(constr);
            if (!sb.AsynchronousProcessing)
            {
                throw new InvalidOperationException("SQL ConnectionString must have Asynchronous Processing=true");
            }
            if (!sb.MultipleActiveResultSets)
            {
                throw new InvalidOperationException("SQL ConnectionString must have MultipleActiveResultSets=true");
            }
            // arbitrary limit for cached validated connection strings.
            if (validConnectionStrings.Count < 100)
            {
                lock (validConnectionStrings)
                {
                    if (!validConnectionStrings.Keys.Contains(constr))
                    {
                        validConnectionStrings.Add(constr, constr);
                    }
                }
            }
        }

        /// <summary>
        /// Returns "net.ssb", the URI scheme for Service Broker.
        /// </summary>
        public override string Scheme
        {
            get { return SsbConstants.Scheme; }
        }

        /// <summary>
        /// The SqlConnection string used for the System.Data.SqlConnection connecting to the
        /// SQL Server instance and database hosting the local endpoint for the Service Broker conversation.
        ///
        /// If the target service is in a remote database or a remote SQL Server instance, a Service Broker route must exist to the target.
        /// </summary>
        public string SqlConnectionString
        {
            get { return this.sqlConnectionString; }
            set
            {
                ValidateConnectionString(value);
                this.sqlConnectionString = value;
            }
        }

        /// <summary>
        /// The name of the Service Broker contract to use. Defaults to 'default'.
        /// </summary>
        public string Contract
        {
            get { return contract; }
            set { contract = SsbHelper.ValidateIdentifier(value); }
        }

        /// <summary>
        /// Controls whether to use Service Broker encryption on the conversation.
        /// </summary>
        public bool UseEncryption
        {
            get { return useEncryption; }
            set { useEncryption = value; }
        }

        /// <summary>
        /// When using a Service Broker contract, if the contract requires a message type other
        /// than DEFAULT, set the OperationContract's Action to the required Ssb message type
        /// and set the UseActionForSsbMessageType flag on the binding.
        /// </summary>
        public bool UseActionForSsbMessageType
        {
            get { return useActionForSsbMessageType; }
            set { useActionForSsbMessageType = value; }
        }

        /// <summary>
        /// If set to true this property instructs the SsbOutputChannel to send the EndConversation message
        /// in its OnClose method. If set to true, it will not be possible to get response messages on the conversation
        /// or send additional messages on the conversation. However, if this property is not set, then you must
        /// end the conversation explicitly at some time in the future, either sua sponte or in response to an End Conversation
        /// message from the other end of the conversation.
        ///
        /// As an alternative, you can place an activated stored procedure on the local end of the queue to process
        /// responses or end conversation messages.
        ///
        /// The default value of SenderEndsConversationOnClose is false.
        /// </summary>
        public bool SenderEndsConversationOnClose
        {
            get { return senderEndsConversationOnClose; }
            set { senderEndsConversationOnClose = value; }
        }

        /// TODO (dbrowne) Evaluate whether we need conversation binding. Without it, waiting for a reply on a
        /// specific conversation in a conversation group may return messages in a related conversation.
        ///
        /// To support SsbConversationGroupSender and Receiver we would need parallel implementations of some methods.
        /// And if it's really needed it will be easier to add later after everything else is nailed down, and we
        /// won't have to evolve the parallel code paths together.
        ///
        //public bool IsConversationHandleSpecified
        //{
        //    get { return conversationHandle.HasValue; }
        //}
        //public Guid ConversationHandle
        //{
        //    get
        //    {
        //        if (!conversationHandle.HasValue)
        //        {
        //            throw new InvalidOperationException("Conversation Group ID not specified");
        //        }
        //        return conversationHandle.Value;
        //    }
        //    set { conversationHandle = value; }
        //}

        /// <summary>
        /// Indicates whether a Channel Listener should be restricted to a particular conversation group. This is useful
        /// for waiting for response messages on a conversation.
/// </summary> public bool IsConversationGroupSpecified { get { return conversationGroupId.HasValue; } } public Guid ConversationGroupId { get { if (!conversationGroupId.HasValue) { throw new InvalidOperationException("Conversation Group ID not specified"); } return conversationGroupId.Value; } set { conversationGroupId = value; } } public override BindingElement Clone() { return new SsbBindingElement(this); } public override IChannelFactory<TChannel> BuildChannelFactory<TChannel>(BindingContext context) { if (context == null) { throw new ArgumentNullException("context"); } return (IChannelFactory<TChannel>)(object)new SsbChannelFactory(this, context); } public override IChannelListener<TChannel> BuildChannelListener<TChannel>(BindingContext context) { if (context == null) { throw new ArgumentNullException("context"); } if (!this.CanBuildChannelListener<TChannel>(context)) { throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Unsupported channel type: {0}.", typeof(TChannel).Name)); } return (IChannelListener<TChannel>)(object)new SsbChannelListener(this, context); } public override bool CanBuildChannelFactory<TChannel>(BindingContext context) { return (typeof(TChannel) == typeof(IOutputSessionChannel)); } public override bool CanBuildChannelListener<TChannel>(BindingContext context) { return (typeof(TChannel) == typeof(IInputSessionChannel)); } #region IPolicyExportExtension Members void IPolicyExportExtension.ExportPolicy(MetadataExporter exporter, PolicyConversionContext context) { if (exporter == null) { throw new ArgumentNullException("exporter"); } if (context == null) { throw new ArgumentNullException("context"); } ICollection<XmlElement> bindingAssertions = context.GetBindingAssertions(); XmlDocument xmlDocument = new XmlDocument(); bindingAssertions.Add(xmlDocument.CreateElement( SsbConstants.SsbNsPrefix, SsbConstants.SsbTransportAssertion, SsbConstants.SsbNs)); bool createdNew = false; MessageEncodingBindingElement encodingBindingElement = context.BindingElements.Find<MessageEncodingBindingElement>(); if (encodingBindingElement == null) { createdNew = true; encodingBindingElement = SsbConstants.DefaultMessageEncodingBindingElement; } if (createdNew && encodingBindingElement is IPolicyExportExtension) { ((IPolicyExportExtension)encodingBindingElement).ExportPolicy(exporter, context); } AddWSAddressingAssertion(context, encodingBindingElement.MessageVersion.Addressing); } #endregion static void AddWSAddressingAssertion(PolicyConversionContext context, AddressingVersion addressing) { XmlElement addressingAssertion = null; if (addressing == AddressingVersion.WSAddressing10) { addressingAssertion = XmlDoc.CreateElement("wsaw", "UsingAddressing", "http://www.w3.org/2006/05/addressing/wsdl"); } else if (addressing == AddressingVersion.WSAddressingAugust2004) { addressingAssertion = XmlDoc.CreateElement("wsap", "UsingAddressing", AddressingVersionConstants.WSAddressingAugust2004NameSpace + "/policy"); } else if (addressing == AddressingVersion.None) { // do nothing addressingAssertion = null; } else { throw new InvalidOperationException("This addressing version is not supported:\n" + addressing.ToString()); } if (addressingAssertion != null) { context.GetBindingAssertions().Add(addressingAssertion); } } #region IWsdlExportExtension Members void IWsdlExportExtension.ExportContract(WsdlExporter exporter, WsdlContractConversionContext context) { } void IWsdlExportExtension.ExportEndpoint(WsdlExporter exporter, WsdlEndpointConversionContext context) { 
BindingElementCollection bindingElements = context.Endpoint.Binding.CreateBindingElements(); MessageEncodingBindingElement encodingBindingElement = bindingElements.Find<MessageEncodingBindingElement>(); if (encodingBindingElement == null) { encodingBindingElement = SsbConstants.DefaultMessageEncodingBindingElement; } // Set SoapBinding Transport URI WsdlNS.SoapBinding soapBinding = GetSoapBinding(context, exporter); if (soapBinding != null) { soapBinding.Transport = SsbConstants.SsbNs; } if (context.WsdlPort != null) { AddAddressToWsdlPort(context.WsdlPort, context.Endpoint.Address, encodingBindingElement.MessageVersion.Addressing); } } #endregion private static WsdlNS.SoapBinding GetSoapBinding(WsdlEndpointConversionContext endpointContext, WsdlExporter exporter) { EnvelopeVersion envelopeVersion = null; WsdlNS.SoapBinding existingSoapBinding = null; object versions = null; object SoapVersionStateKey = new object(); //get the soap version state if (exporter.State.TryGetValue(SoapVersionStateKey, out versions)) { Dictionary<WsdlNS.Binding, EnvelopeVersion> vd = (Dictionary<WsdlNS.Binding, EnvelopeVersion>)versions; if (versions != null && vd.ContainsKey(endpointContext.WsdlBinding)) { envelopeVersion = vd[endpointContext.WsdlBinding]; } } if (envelopeVersion == EnvelopeVersion.None) { return null; } //get existing soap binding foreach (object o in endpointContext.WsdlBinding.Extensions) { if (o is WsdlNS.SoapBinding) { existingSoapBinding = (WsdlNS.SoapBinding)o; } } return existingSoapBinding; } private static void AddAddressToWsdlPort(WsdlNS.Port wsdlPort, EndpointAddress endpointAddress, AddressingVersion addressing) { if (addressing == AddressingVersion.None) { return; } MemoryStream memoryStream = new MemoryStream(); XmlWriter xmlWriter = XmlWriter.Create(memoryStream); xmlWriter.WriteStartElement("temp"); if (addressing == AddressingVersion.WSAddressing10) { xmlWriter.WriteAttributeString("xmlns", "wsa10", null, AddressingVersionConstants.WSAddressing10NameSpace); } else if (addressing == AddressingVersion.WSAddressingAugust2004) { xmlWriter.WriteAttributeString("xmlns", "wsa", null, AddressingVersionConstants.WSAddressingAugust2004NameSpace); } else { throw new InvalidOperationException("This addressing version is not supported:\n" + addressing.ToString()); } endpointAddress.WriteTo(addressing, xmlWriter); xmlWriter.WriteEndElement(); xmlWriter.Flush(); memoryStream.Seek(0, SeekOrigin.Begin); XmlReader xmlReader = XmlReader.Create(memoryStream); xmlReader.MoveToContent(); XmlElement endpointReference = (XmlElement)XmlDoc.ReadNode(xmlReader).ChildNodes[0]; wsdlPort.Extensions.Add(endpointReference); } //reflects the structure of the wsdl static XmlDocument XmlDoc { get { if (xmlDocument == null) { NameTable nameTable = new NameTable(); nameTable.Add("Policy"); nameTable.Add("All"); nameTable.Add("ExactlyOne"); nameTable.Add("PolicyURIs"); nameTable.Add("Id"); nameTable.Add("UsingAddressing"); nameTable.Add("UsingAddressing"); xmlDocument = new XmlDocument(nameTable); } return xmlDocument; } } } }
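// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): stacking the Service
// Broker transport element into a WCF CustomBinding. The connection string must
// pass ValidateConnectionString above (Asynchronous Processing=true and
// MultipleActiveResultSets=true); the server, database and encoder chosen here
// are placeholders.
// ---------------------------------------------------------------------------
using System.ServiceModel.Channels;
using Microsoft.Samples.SsbTransportChannel;

namespace SsbBindingUsageExample
{
    internal static class Demo
    {
        internal static Binding CreateBinding()
        {
            var transport = new SsbBindingElement(
                "Data Source=.;Initial Catalog=ServiceBrokerSample;Integrated Security=true;" +
                "Asynchronous Processing=true;MultipleActiveResultSets=true")
            {
                Contract = "DEFAULT",
                UseEncryption = false,
                SenderEndsConversationOnClose = true
            };

            // Encoder first, transport element last, as with any WCF custom binding stack.
            return new CustomBinding(new TextMessageEncodingBindingElement(), transport);
        }
    }
}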
using System; using System.Data; using System.Drawing; using System.Linq; using System.Windows.Forms; using bv.common; using bv.common.Core; using bv.common.db; using bv.common.db.Core; using bv.common.Enums; using bv.common.Objects; using bv.common.win; using bv.model.Model.Core; using bv.winclient.Core; using bv.winclient.Layout; using DevExpress.XtraEditors.Controls; using DevExpress.XtraEditors.Mask; using DevExpress.XtraNavBar; using DevExpress.XtraTreeList; using DevExpress.XtraTreeList.Nodes; using EIDSS; using eidss.avr.BaseComponents; using eidss.avr.db.DBService.QueryBuilder; using eidss.avr.db.Interfaces; using EIDSS.Core; using eidss.model.Avr.Commands; using eidss.model.Avr.Commands.Layout; using eidss.model.Avr.Tree; using eidss.model.Core; using eidss.model.Enums; using eidss.model.Resources; using BaseReferenceType = bv.common.db.BaseReferenceType; using Localizer = bv.common.Core.Localizer; namespace eidss.avr.QueryBuilder { public sealed partial class QueryDetailPanel : BaseAvrDetailPresenterPanel, IQueryDetailView { private QueryDetailPresenter m_QueryDetailPresenter; #region Init public ChildQueryObjectList m_ObjectList; private readonly Query_DB m_QueryDbService; private bool m_Modified; private readonly bool m_ReadOnlyMode; private bool m_IsNewOrCopy; public QueryDetailPanel() { LayoutCorrector.Reset(); InitializeComponent(); if (IsDesignMode() || bv.common.Configuration.BaseSettings.ScanFormsMode) return; m_QueryDbService = new Query_DB(); DbService = m_QueryDbService; RegisterChildObject(qsoRoot, RelatedPostOrder.PostLast); m_ObjectList = new ChildQueryObjectList( qsoRoot.Parent, qsoRoot.Left, qsoRoot.Top, qsoRoot.Width, qsoRoot.Height, qsoRoot.TabIndex); AuditObject = new AuditObject((long) EIDSSAuditObject.daoReference, (long) AuditTable.tasQuery); LookupTableNames = new[] {"Query"}; m_QueryDetailPresenter = (QueryDetailPresenter) SharedPresenter[this]; if (WinUtils.IsComponentInDesignMode(this)) { return; } UpdateGeneralQueryInfoView(); RtlHelper.SetRTL(this); m_Modified = false; } public QueryDetailPanel(bool aReadOnlyMode) : this() { m_ReadOnlyMode = aReadOnlyMode; } /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing"> true if managed resources should be disposed; otherwise, false. 
</param> protected override void Dispose(bool disposing) { try { if (m_QueryDetailPresenter != null) { if (m_QueryDetailPresenter.SharedPresenter != null) { m_QueryDetailPresenter.SharedPresenter.UnregisterView(this); } m_QueryDetailPresenter.Dispose(); m_QueryDetailPresenter = null; } if (eventManager != null) { eventManager.ClearAllReferences(); } if (disposing && (components != null)) { components.Dispose(); } } finally { base.Dispose(disposing); } } #region Command handlers public void ProcessQueryLayoutCommand(QueryLayoutCommand queryLayoutCommand) { queryLayoutCommand.State = CommandState.Pending; switch (queryLayoutCommand.Operation) { case QueryLayoutOperation.CopyQuery: cmdCopy_Click(this, EventArgs.Empty); break; case QueryLayoutOperation.Publish: ProcessAfterPost(() => PublishUnpublishAfterPost(true)); break; case QueryLayoutOperation.Unpublish: ProcessAfterPost(() => PublishUnpublishAfterPost(false)); break; default: queryLayoutCommand.State = CommandState.Unprocessed; break; } if (queryLayoutCommand.State == CommandState.Pending) { queryLayoutCommand.State = CommandState.Processed; } } private void PublishUnpublishAfterPost(bool isPublish) { if (UserConfirmPublishUnpublish(AvrTreeElementType.Query, isPublish)) { object id = m_QueryDbService.ID; m_QueryDetailPresenter.QueryDbService.PublishUnpublish((long)id, isPublish); LoadData(ref id); } } #endregion private void UpdateGeneralQueryInfoView() { PermissionObject = EIDSSPermissionObject.AVRReport; cmdCopy.Enabled = AvrPermissions.InsertPermission; if (Utils.IsEmpty(lblQueryName.Text) == false) { int bracketInd = lblQueryName.Text.IndexOf("(", StringComparison.Ordinal); if (bracketInd >= 0) { lblQueryName.Text = lblQueryName.Text.Substring(0, bracketInd).Trim(); } lblQueryName.Text = lblQueryName.Text + @" (" + Localizer.GetLanguageName(ModelUserContext.CurrentLanguage) + @")"; } const int step = 4; if (ModelUserContext.CurrentLanguage == Localizer.lngEn) { int curTop = lblDefQueryName.Top + lblDefQueryName.Height + step; lblQueryName.Enabled = false; lblQueryName.Visible = false; SetMandatoryFieldVisible(txtQueryName, false); lblDescription.Top = curTop; memDescription.Top = curTop; memDescription.Height = grpGeneralInfo.GroupClientHeight - curTop - 2*step; chAddAllKeyFieldValues.Top = curTop; } else { int curTop = lblDefQueryName.Top + lblDefQueryName.Height + step; lblQueryName.Enabled = true; lblQueryName.Visible = true; lblQueryName.Top = curTop; SetMandatoryFieldVisible(txtQueryName, true); txtQueryName.Top = curTop; curTop = curTop + lblQueryName.Height + step; lblDescription.Top = curTop; memDescription.Top = curTop; memDescription.Height = grpGeneralInfo.GroupClientHeight - curTop - 2*step; chAddAllKeyFieldValues.Top = curTop; } } #endregion #region Keys public override object GetKey(string tableName = null, string keyFieldName = null) { return m_QueryDbService.ID; } public override object GetChildKey(IRelatedObject child) { if (child is QuerySearchObjectInfo) { var qsoInfo = child as QuerySearchObjectInfo; if (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) && (baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Count > 0)) { DataRow[] dr = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Select( string.Format("idfsSearchObject = '{0}'", qsoInfo.SearchObject), "intOrder", DataViewRowState.OriginalRows | DataViewRowState.Added); if (dr.Length > 0) { return (dr[0].RowState != DataRowState.Added) ? 
dr[0]["idfQuerySearchObject", DataRowVersion.Original] : dr[0]["idfQuerySearchObject"]; } } return null; } return GetKey(Query_DB.TasQuery, "idflQuery"); } #endregion #region Bindings private bool m_CopyDefaultQueryName; private void BindQueryInfo() { m_CopyDefaultQueryName = m_QueryDbService.IsNewObject; LookupBinder.BindTextEdit(txtDefQueryName, baseDataSet, Query_DB.TasQuery + ".DefQueryName"); txtDefQueryName.Properties.Mask.MaskType = MaskType.RegEx; txtDefQueryName.Properties.Mask.EditMask = @"[a-zA-Z0-9~-+!-) \*\.,\-@\^_?/`=<>\|\\]*"; txtDefQueryName.Properties.Mask.UseMaskAsDisplayFormat = true; LookupBinder.BindTextEdit(txtQueryName, baseDataSet, Query_DB.TasQuery + ".QueryName"); LookupBinder.BindTextEdit(memDescription, baseDataSet, Query_DB.TasQuery + ".QueryDescription"); memDescription.Properties.NullText = ""; memDescription.Properties.MaxLength = 2000; chAddAllKeyFieldValues.DataBindings.Clear(); chAddAllKeyFieldValues.DataBindings.Add("EditValue", baseDataSet, Query_DB.TasQuery + ".blnAddAllKeyFieldValues"); } private void BindQueryObjectTree() { trlQuery.DataSource = new DataView(baseDataSet.Tables[Query_DB.TasQueryObjectTree]); trlQuery.KeyFieldName = "idfQuerySearchObject"; trlQuery.ParentFieldName = "idfParentQuerySearchObject"; LookupBinder.BindBaseRepositoryLookup(cbSearchObject, BaseReferenceType.rftSearchObject, false); foreach (EditorButton btn in cbSearchObject.Buttons) { btn.Visible = false; } trlQuery.ExpandAll(); if (trlQuery.Nodes.Count > 0) { BeginUpdate(); trlQuery.FocusedNode = trlQuery.Nodes[0]; EndUpdate(); } if (ReadOnly) { trlQuery.OptionsBehavior.Editable = false; } } /// <summary> /// Check if the user has permission to the specified AVR search object /// </summary> /// <param name="searchObjectId">Id of the search object to check permissions</param> /// <returns>Returns boolean value. 
True if the user has permission to the object, otherwise - false.</returns> private bool HasUserPermissionToTheObject ( object searchObjectId ) { DataView dvSearchObjectToSystemFunction = LookupCache.Get(LookupTables.SearchObjectToSystemFunction); bool okToAddMenuItem = true; long soId; if ((searchObjectId == null) || (!(long.TryParse(Utils.Str(searchObjectId), out soId)))) { return (false); } if (dvSearchObjectToSystemFunction != null) { dvSearchObjectToSystemFunction.RowFilter = string.Format("idfsSearchObject = {0}", Utils.Str(searchObjectId)); if (dvSearchObjectToSystemFunction.Count > 0) { foreach (DataRowView drSearchObjectToSystemFunction in dvSearchObjectToSystemFunction) { EIDSSPermissionObject sf; if ( !Enum.TryParse( Utils.Str(drSearchObjectToSystemFunction["idfsSystemFunction"]), true, out sf)) { continue; } if ((EidssUserContext.User.HasPermission(PermissionHelper.SelectPermission(sf)))) { continue; } okToAddMenuItem = false; break; } } } return (okToAddMenuItem); } private void InitQueryTypeMenu() { mnuQueryType.Items.Clear(); using (DataView dv = LookupCache.Get(LookupTables.SearchObject)) { foreach (DataRowView r in dv) { if (!Utils.IsEmpty(r["blnPrimary"]) && ((bool) r["blnPrimary"])) { var it = new ToolStripMenuItem { Name = "so" + Utils.Str(r["idfsSearchObject"]), Tag = r["idfsSearchObject"], Text = Utils.Str(r["Name"]) }; if (HasUserPermissionToTheObject(r["idfsSearchObject"])) { mnuQueryType.Items.Add(it); } } } } UpdateQueryTypeMenu(); } private void UpdateQueryTypeMenu() { for (int i = 0; i < mnuQueryType.Items.Count; i++) { mnuQueryType.Items[i].Enabled = true; } DataRow r = RootObjectRow; if (r != null) { ToolStripItem curRoot = mnuQueryType.Items["so" + Utils.Str(r["idfsSearchObject"])]; if (curRoot != null) { curRoot.Enabled = false; } } FillChildObjectMenu(); } private void FillChildObjectMenu() { mnuChildObject.Items.Clear(); DataRow rRoot = RootObjectRow; if ((rRoot == null) || Utils.IsEmpty(rRoot["idfsSearchObject"])) { return; } using (DataView dv = LookupCache.Get(LookupTables.SearchObjectRelation)) { dv.RowFilter = string.Format("idfsParentSearchObject = '{0}' ", rRoot["idfsSearchObject"]); foreach (DataRowView r in dv) { if (Utils.Str(r["idfsParentSearchObject"]) == Utils.Str(rRoot["idfsSearchObject"])) { var it = new ToolStripMenuItem { Name = "so" + Utils.Str(r["idfsChildSearchObject"]), Tag = r["idfsChildSearchObject"], Text = Utils.Str(r["ChildSearchObjectName"]) }; if (HasUserPermissionToTheObject(r["idfsChildSearchObject"])) { mnuChildObject.Items.Add(it); } } } } UpdateChildObjectMenu(); } private void UpdateChildObjectMenu() { for (int i = 0; i < mnuChildObject.Items.Count; i++) { mnuChildObject.Items[i].Enabled = true; } btnAddChildObject.Enabled = true; if (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) && (baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Count > 0)) { DataRow[] dr = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Select("idfParentQuerySearchObject is not null", "", DataViewRowState.CurrentRows); foreach (DataRow r in dr) { ToolStripItem curObj = mnuChildObject.Items["so" + Utils.Str(r["idfsSearchObject"])]; if (curObj != null) { curObj.Enabled = false; } } btnAddChildObject.Enabled = CanAddSearchObject && (!ReadOnly); } } private void InitRootObject() { DataRow r = RootObjectRow; if (r != null) { long aSearchObject = QuerySearchObjectInfo.DefaultSearchObject; if ((!Utils.IsEmpty(r["idfsSearchObject"])) && (r["idfsSearchObject"] is long) && ((long) r["idfsSearchObject"] != -1L)) { aSearchObject = (long) 
r["idfsSearchObject"]; } int aOrder = 0; if (Utils.IsEmpty(r["intOrder"]) == false) { aOrder = (int) r["intOrder"]; } qsoRoot.SearchObject = aSearchObject; qsoRoot.Order = aOrder; } } // private bool m_Copying; private void InitChildObjectList() { // Commented because m_Copying is always false, this code never executing // if (m_Copying) // { // qsoRoot.Visible = false; // m_ObjectList.SetAllSearchObjectsInVisible(); // qsoRoot.Visible = true; // btnRemoveChildObject.Enabled = (m_ObjectList.Count > 0) && (!ReadOnly); // return; // } m_ObjectList.Clear(); qsoRoot.Visible = false; if (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) && (baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Count > 0)) { DataRow[] dr = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Select("idfParentQuerySearchObject is not null", "intOrder", DataViewRowState.CurrentRows); foreach (DataRow r in dr) { var aSearchObject = (long) r["idfsSearchObject"]; int aOrder = 0; if (Utils.IsEmpty(r["intOrder"]) == false) { aOrder = (int) r["intOrder"]; } QuerySearchObjectInfo qsoInfo = m_ObjectList.Add(aSearchObject, aOrder); RegisterChildObject(qsoInfo, RelatedPostOrder.PostLast); //object id = r["idfQuerySearchObject"]; //qsoInfo.LoadData(ref id); if (ReadOnly) { qsoInfo.ReadOnly = true; } } } m_ObjectList.SetAllSearchObjectsInVisible(); qsoRoot.Visible = true; btnRemoveChildObject.Enabled = (m_ObjectList.Count > 0) && (!ReadOnly); } private void RegisterObjectInfo() { if (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) && (baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Count > 0)) { DataRow[] dr = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Select("idfParentQuerySearchObject is not null", "intOrder", DataViewRowState.CurrentRows); foreach (DataRow r in dr) { QuerySearchObjectInfo qsoInfo = m_ObjectList.Item((long) r["idfsSearchObject"]); RegisterChildObject(qsoInfo, RelatedPostOrder.PostLast); } } } protected override void DefineBinding() { BindQueryInfo(); BindQueryObjectTree(); InitQueryTypeMenu(); InitRootObject(); InitChildObjectList(); } public bool QueryPost(PostType postType = PostType.FinalPosting) { return m_QueryDetailPresenter.SharedPresenter.SharedModel.ParentForm.Post(postType); } /// <summary> /// Don't Call this method directly!!! It should be called when parent form performs post. Use QueryPost() instead. 
/// </summary> /// <param name="postType"></param> /// <returns></returns> public override bool Post(PostType postType = PostType.FinalPosting) { bool post = base.Post(postType); if (post) { m_IsNewOrCopy = false; } return post; } public override bool HasChanges() { return m_IsNewOrCopy || base.HasChanges(); } private void QueryDetail_AfterLoadData(object sender, EventArgs e) { RegisterObjectInfo(); if ((baseDataSet != null) && baseDataSet.Tables.Contains(Query_DB.TasQuery) && (baseDataSet.Tables[Query_DB.TasQuery].Rows.Count > 0) && (Utils.IsEmpty(baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["idflQuery"]) == false) && (baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["blnReadOnly"] is bool) && (bool) baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["blnReadOnly"]) { ReadOnly = true; } else if (ReadOnly != m_ReadOnlyMode) { ReadOnly = m_ReadOnlyMode; } } #endregion #region Properties public override bool ReadOnly { get { return base.ReadOnly; } set { value = value || m_ReadOnlyMode; base.ReadOnly = value; cmdNew.Enabled = AvrPermissions.InsertPermission; cmdDelete.Enabled = (!value) && AvrPermissions.DeletePermission; cmdSave.Enabled = (!value) && AvrPermissions.UpdatePermission; cmdCopy.Enabled = AvrPermissions.InsertPermission; cmdCancelChanges.Enabled = (!value); cmdClose.Enabled = true; qsoRoot.ReadOnly = value; btnRemoveChildObject.Enabled = false; if (m_ObjectList != null) { for (int aOrder = 1; aOrder <= m_ObjectList.Count; aOrder++) { QuerySearchObjectInfo qsoInfo = m_ObjectList.Item(aOrder); qsoInfo.ReadOnly = value; } btnRemoveChildObject.Enabled = (!value) && (m_ObjectList.Count > 0); } btnEditQueryType.Enabled = !value; btnAddChildObject.Enabled = (!value) && CanAddSearchObject; } } public bool Modified { get { return m_Modified; } } private bool CanAddSearchObject { get { int disabledCount = mnuChildObject.Items .Cast<ToolStripItem>() .Count(curObj => curObj.Enabled == false); return (disabledCount < mnuChildObject.Items.Count); } } private DataRow RootObjectRow { get { if (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) && (baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Count > 0)) { DataRow[] dr = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Select("idfParentQuerySearchObject is null", "", DataViewRowState.CurrentRows); if (dr.Length > 0) { return dr[0]; } } return null; } } private long NewNegativeQuerySearchObjectID { get { if (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) == false) { return -1; } long querySearchObjectID = -2; DataRow[] r = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Select( string.Format("idfQuerySearchObject <= {0}", querySearchObjectID), "idfQuerySearchObject"); if (r.Length > 0) { if (r[0].RowState != DataRowState.Deleted) { querySearchObjectID = (long) (r[0]["idfQuerySearchObject"]) - 1; } else { querySearchObjectID = (long) (r[0]["idfQuerySearchObject", DataRowVersion.Original]) - 1; } } return querySearchObjectID; } } #endregion #region Handlers private void txtDefQueryName_EditValueChanged(object sender, EventArgs e) { if ((Loading == false) && m_CopyDefaultQueryName) { txtQueryName.Text = txtDefQueryName.Text; } RaiseRefreshCaption(); } private void txtQueryName_EditValueChanged(object sender, EventArgs e) { if ((Loading == false) && (txtQueryName.Text != txtDefQueryName.Text)) { m_CopyDefaultQueryName = false; } RaiseRefreshCaption(); } private void btnEditQueryType_Click(object sender, EventArgs e) { if (mnuQueryType.Items.Count > 0) { mnuQueryType.Show(btnEditQueryType, new Point(0, btnEditQueryType.Height)); } } 
private void mnuQueryType_ItemClicked(object sender, ToolStripItemClickedEventArgs e) { if (qsoRoot.SearchObject != (long) e.ClickedItem.Tag) { if (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) && (baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Count > 0)) { for (int i = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Count - 1; i >= 0; i--) { DataRow r = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows[i]; if (r.RowState != DataRowState.Deleted) { if (Utils.IsEmpty(r["idfParentQuerySearchObject"]) == false) { QuerySearchObjectInfo qsoInfo = m_ObjectList.Item((long) r["idfsSearchObject"]); if (qsoInfo != null) { UnRegisterChildObject(qsoInfo); m_ObjectList.Remove(qsoInfo); } r.Delete(); } else { r["idfsSearchObject"] = (long) e.ClickedItem.Tag; qsoRoot.SearchObject = (long) e.ClickedItem.Tag; } } } } } mnuQueryType.Close(); UpdateQueryTypeMenu(); btnAddChildObject.Enabled = CanAddSearchObject && (!ReadOnly); btnRemoveChildObject.Enabled = (m_ObjectList.Count > 0) && (!ReadOnly); } private void btnAddChildObject_Click(object sender, EventArgs e) { if (trlQuery.Nodes.Count == 0) { return; } BeginUpdate(); trlQuery.FocusedNode = trlQuery.Nodes[0]; EndUpdate(); if (mnuChildObject.Items.Count > 0) { mnuChildObject.Show(btnAddChildObject, new Point(0, btnAddChildObject.Height)); } } private void mnuChildObject_ItemClicked(object sender, ToolStripItemClickedEventArgs e) { AddChildObject((long) e.ClickedItem.Tag); mnuChildObject.Close(); UpdateChildObjectMenu(); btnAddChildObject.Enabled = CanAddSearchObject && (!ReadOnly); btnRemoveChildObject.Enabled = (m_ObjectList.Count > 0) && (!ReadOnly); } private void AddChildObject(long aSearchObject) { BeginUpdate(); DataRow rRoot = RootObjectRow; if ((rRoot != null) && (Utils.IsEmpty(aSearchObject) == false)) { object newObjID = NewNegativeQuerySearchObjectID; DataRow rChild = baseDataSet.Tables[Query_DB.TasQueryObjectTree].NewRow(); rChild["idflQuery"] = m_QueryDbService.ID; rChild["idfParentQuerySearchObject"] = rRoot["idfQuerySearchObject"]; rChild["idfsSearchObject"] = aSearchObject; rChild["idfQuerySearchObject"] = newObjID; baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Add(rChild); qsoRoot.Visible = false; QuerySearchObjectInfo qsoInfo = m_ObjectList.Add(aSearchObject); rChild["intOrder"] = qsoInfo.Order; RegisterChildObject(qsoInfo, RelatedPostOrder.PostLast); qsoInfo.LoadData(ref newObjID); TreeListNode node = trlQuery.FindNodeByKeyID(rChild["idfQuerySearchObject"]); if (node != null) { node.ParentNode.ExpandAll(); trlQuery.FocusedNode = node; trlQuery.FocusedColumn = trlQuery.Columns["idfsSearchObject"]; m_ObjectList.SetSearchObjectVisible(aSearchObject); } else { m_ObjectList.SetAllSearchObjectsInVisible(); qsoRoot.Visible = true; } } EndUpdate(); } private void UpdateChildOrder() { if ((baseDataSet == null) || (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) == false) || (m_ObjectList == null)) { return; } foreach (DataRow r in baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows) { if (r.RowState != DataRowState.Deleted) { if (m_ObjectList.Contains((long) r["idfsSearchObject"])) { r["intOrder"] = m_ObjectList.Item((long) r["idfsSearchObject"]).Order; } } } } private void btnRemoveChildObject_Click(object sender, EventArgs e) { if ((trlQuery.FocusedNode == null) || (trlQuery.FocusedNode.Level == 0)) { return; } if (DeletePromptDialog() != DialogResult.Yes) { return; } BeginUpdate(); QuerySearchObjectInfo qsoInfo = m_ObjectList.Item((long) 
trlQuery.FocusedNode.GetValue(trlQuery.Columns["idfsSearchObject"])); if (qsoInfo != null) { UnRegisterChildObject(qsoInfo); m_ObjectList.Remove(qsoInfo); UpdateChildOrder(); } trlQuery.DeleteNode(trlQuery.FocusedNode); EndUpdate(); UpdateChildObjectMenu(); btnAddChildObject.Enabled = CanAddSearchObject && (!ReadOnly); btnRemoveChildObject.Enabled = (m_ObjectList.Count > 0) && (!ReadOnly); } private void trlQuery_AfterFocusNode(object sender, NodeEventArgs e) { if ((e.Node == null) || (e.Node.Level == 0)) { m_ObjectList.SetAllSearchObjectsInVisible(); qsoRoot.Visible = true; } else { var aSearchObject = (long) (trlQuery.FocusedNode.GetValue(trlQuery.Columns["idfsSearchObject"])); if (m_ObjectList.Contains(aSearchObject)) { qsoRoot.Visible = false; m_ObjectList.SetSearchObjectVisible(aSearchObject); } else { m_ObjectList.SetAllSearchObjectsInVisible(); qsoRoot.Visible = true; } } } private bool IsBlankQuery() { if ((baseDataSet == null) || (baseDataSet.Tables.Contains(Query_DB.TasQuery) == false) || (baseDataSet.Tables[Query_DB.TasQuery].Rows.Count == 0)) { return true; } return Utils.IsEmpty(baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["DefQueryName"]); } private bool m_ShowClearFormPrompt = true; private DataRow GetObjectRowBySearchObject(long aSearchObject) { if (baseDataSet.Tables.Contains(Query_DB.TasQueryObjectTree) == false) { return null; } DataRow[] dr = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Select(string.Format("idfsSearchObject = {0}", aSearchObject)); if (dr.Length == 0) { return null; } // Find row that could be edited DataRow r = baseDataSet.Tables[Query_DB.TasQueryObjectTree].Rows.Find(dr[0]["idfQuerySearchObject"]); return r; } private bool DefaultQueryNameExists(string defQueryName) { LookupTableInfo info = LookupCache.LookupTables[LookupTables.Query.ToString()]; DataTable dtQuery = LookupCache.Fill(info); DataRow[] dr = dtQuery.Select(string.Format("DefQueryName = '{0}'", Utils.Str(defQueryName).Replace("'", "''"))); if (dr.Length > 1) { return true; } if ((dr.Length == 1) && ((long) dr[0]["idflQuery"] != (long) m_QueryDbService.ID)) { return true; } return false; } private bool QueryNameExists(string queryName) { LookupTableInfo info = LookupCache.LookupTables[LookupTables.Query.ToString()]; DataTable dtQuery = LookupCache.Fill(info); DataRow[] dr = dtQuery.Select(string.Format("QueryName = '{0}'", Utils.Str(queryName).Replace("'", "''"))); if (dr.Length > 1) { return true; } if ((dr.Length == 1) && ((long) dr[0]["idflQuery"] != (long) m_QueryDbService.ID)) { return true; } return false; } private object GetCopyDefaultQueryName(object defQueryName) { if (Utils.IsEmpty(defQueryName)) { return defQueryName; } string copyPrefix = EidssMessages.Get("msgCopyPrefix", "Copy{0} of", "en"); int copyIndex = 0; //-1; string copyDefQueryName = string.Format("{0} {1}", string.Format(Utils.Str(copyPrefix).Trim(), ""), Utils.Str(defQueryName)); //Utils.Str(defQueryName); bool isNameReady = false; while (isNameReady == false) { if (DefaultQueryNameExists(copyDefQueryName)) { copyIndex = copyIndex + 1; if (copyIndex > 0) { copyDefQueryName = string.Format("{0} {1}", string.Format(Utils.Str(copyPrefix).Trim(), string.Format(" ({0})", copyIndex)), Utils.Str(defQueryName)); } else { copyDefQueryName = string.Format("{0} {1}", string.Format(Utils.Str(copyPrefix).Trim(), ""), Utils.Str(defQueryName)); } } else { isNameReady = true; } } return copyDefQueryName; } private object GetCopyQueryName(object queryName) { if (Utils.IsEmpty(queryName)) { return queryName; } string copyPrefix 
= EidssMessages.Get("msgCopyPrefix", "Copy{0} of"); int copyIndex = 0; //-1; string copyQueryName = string.Format("{0} {1}", string.Format(Utils.Str(copyPrefix).Trim(), ""), Utils.Str(queryName)); //Utils.Str(queryName); bool isNameReady = false; while (isNameReady == false) { if (QueryNameExists(copyQueryName)) { copyIndex = copyIndex + 1; if (copyIndex > 0) { copyQueryName = string.Format("{0} {1}", string.Format(Utils.Str(copyPrefix).Trim(), string.Format(" ({0})", copyIndex)), Utils.Str(queryName)); } else { copyQueryName = string.Format("{0} {1}", string.Format(Utils.Str(copyPrefix).Trim(), ""), Utils.Str(queryName)); } } else { isNameReady = true; } } return copyQueryName; } #endregion #region Validate Methods private DataTable m_SearchTable; private void InitSearchTable() { m_SearchTable = new DataTable("SearchTable"); var colPk = new DataColumn("idfSearchTableForUnionTable", typeof (String)); m_SearchTable.Columns.Add(colPk); var colID = new DataColumn("idfSearchTable", typeof (Int64)); m_SearchTable.Columns.Add(colID); var colName = new DataColumn("strTableName", typeof (String)); m_SearchTable.Columns.Add(colName); var colParentID = new DataColumn("idfParentSearchTable", typeof (Int64)); m_SearchTable.Columns.Add(colParentID); var colUnionID = new DataColumn("idfUnionSearchTable", typeof (Int64)); m_SearchTable.Columns.Add(colUnionID); var colCount = new DataColumn("intTableCount", typeof (Int32)); m_SearchTable.Columns.Add(colCount); m_SearchTable.PrimaryKey = new[] {m_SearchTable.Columns["idfSearchTableForUnionTable"]}; } private int GetQueryComplexity() { int complexity = 1; if (Utils.IsEmpty(qsoRoot.SearchObject)) { return complexity; } // Union tables for query DataView dvMainTableForObject = LookupCache.Get(LookupTables.MainTableForObject); if ((dvMainTableForObject == null) || (dvMainTableForObject.Count == 0)) { return complexity; } dvMainTableForObject.RowFilter = string.Format("idfsSearchObject = {0} ", qsoRoot.SearchObject); // Main table for root object in case root object is connected just to one main table object idfUniqueMainSearchTable = null; if (dvMainTableForObject.Count == 0) { return complexity; } if (dvMainTableForObject.Count == 1) { idfUniqueMainSearchTable = dvMainTableForObject[0]["idfMainSearchTable"]; if (Utils.IsEmpty(idfUniqueMainSearchTable)) { return complexity; } } // Determine union tables for query foreach (IRelatedObject qsoTable in Children) { if (qsoTable is QuerySearchObjectInfo) { var qsoMainTableInfo = qsoTable as QuerySearchObjectInfo; if (Utils.IsEmpty(qsoMainTableInfo.SearchObject) == false) { dvMainTableForObject.RowFilter = string.Format("idfsSearchObject = {0} ", qsoMainTableInfo.SearchObject); if (dvMainTableForObject.Count > 1) { break; } } } } //DataRow rRootObject = LookupCache.GetLookupRow(qsoRoot.SearchObject, LookupTables.SearchObject.ToString()); //if ((rRootObject == null) || (Utils.IsEmpty(rRootObject["idfMainSearchTable"]))) { return complexity; } //long idfMainSearchTable = (long)(rRootObject["idfMainSearchTable"]); // Define new table with all search tables of the query with specified union table InitSearchTable(); foreach (IRelatedObject qso in Children) { if (qso is QuerySearchObjectInfo) { var qsoInfo = qso as QuerySearchObjectInfo; DataView dvField = qsoInfo.GetQuerySearchFieldView(); if (dvField != null) { foreach (DataRowView r in dvField) { DataRow rFieldLookup = LookupCache.GetLookupRow(r["idfsSearchField"], LookupTables.SearchField.ToString()); if (rFieldLookup != null) { foreach (DataRowView rUnionTable in 
dvMainTableForObject) { object unionSearchTableID = rUnionTable["idfUnionSearchTable"]; object mainSearchTableID = idfUniqueMainSearchTable; if (Utils.IsEmpty(idfUniqueMainSearchTable)) { mainSearchTableID = unionSearchTableID; } DataRow rFieldSource = LookupCache.GetLookupRow( string.Format("{0}__{1}", rFieldLookup["idfsSearchField"], unionSearchTableID), LookupTables.FieldSourceForTable.ToString()); if (rFieldSource != null) { DataRow rLookup = LookupCache.GetLookupRow( string.Format("{0}__{1}__{2}", unionSearchTableID, mainSearchTableID, rFieldSource["idfFieldSourceTable"]), LookupTables.SearchTable.ToString()); if (rLookup != null) { object searchTableID = rLookup["idfSearchTable"]; string searchTableForUnionTableID = string.Format("{0}__{1}", unionSearchTableID, searchTableID); if (m_SearchTable.Rows.Find(searchTableForUnionTableID) == null) { DataRow dr = m_SearchTable.NewRow(); dr["idfSearchTableForUnionTable"] = searchTableForUnionTableID; dr["idfSearchTable"] = searchTableID; dr["idfUnionSearchTable"] = unionSearchTableID; dr["strTableName"] = rLookup["strTableName"]; dr["idfParentSearchTable"] = rLookup["idfParentSearchTable"]; if (qsoInfo.IsFFObject) { dr["intTableCount"] = 0; } else { if (rLookup["intTableCount"] is int) { dr["intTableCount"] = (int) rLookup["intTableCount"]; } else { dr["intTableCount"] = 0; } } m_SearchTable.Rows.Add(dr); } DataRow rCurTable = m_SearchTable.Rows.Find(searchTableForUnionTableID); if (rCurTable != null) { if (qsoInfo.IsFFObject) { DataRow rFFField = LookupCache.GetLookupRow(r["FieldAlias"], LookupTables.ParameterForFFType.ToString()); rCurTable["intTableCount"] = (int) rCurTable["intTableCount"] + 1; if (Utils.IsEmpty(rFFField["idfsReferenceType"]) == false) { rCurTable["intTableCount"] = (int) rCurTable["intTableCount"] + 1; } } else if ((Utils.IsEmpty(rFieldLookup["idfsReferenceType"]) == false) || (Utils.IsEmpty(rFieldLookup["idfsGISReferenceType"]) == false) || (Utils.Str(rFieldLookup["blnGeoLocationString"], "0") == "1") ) { rCurTable["intTableCount"] = (int) rCurTable["intTableCount"] + 1; } } } } } } } } } } m_SearchTable.AcceptChanges(); foreach (DataRowView rUnionTable in dvMainTableForObject) { object unionSearchTableID = rUnionTable["idfUnionSearchTable"]; object mainSearchTableID = idfUniqueMainSearchTable; if (Utils.IsEmpty(idfUniqueMainSearchTable)) { mainSearchTableID = unionSearchTableID; } DataRow[] drNew = m_SearchTable.Select( string.Format("idfUnionSearchTable = {0} and idfParentSearchTable is not null ", unionSearchTableID), "idfSearchTableForUnionTable", DataViewRowState.Added); while (drNew.Length > 0) { DataRow rLookup = LookupCache.GetLookupRow( string.Format("{0}__{1}__{2}", unionSearchTableID, mainSearchTableID, drNew[0]["idfParentSearchTable"]), LookupTables.SearchTable.ToString()); if (rLookup != null) { object searchTableID = rLookup["idfSearchTable"]; string searchTableForUnionTableID = string.Format("{0}__{1}", unionSearchTableID, searchTableID); if (m_SearchTable.Rows.Find(searchTableID) == null) { DataRow dr = m_SearchTable.NewRow(); dr["idfSearchTableForUnionTable"] = searchTableForUnionTableID; dr["idfSearchTable"] = searchTableID; dr["idfUnionSearchTable"] = unionSearchTableID; dr["strTableName"] = rLookup["strTableName"]; dr["idfParentSearchTable"] = rLookup["idfParentSearchTable"]; dr["intTableCount"] = rLookup["intTableCount"]; m_SearchTable.Rows.Add(dr); } } DataRow rNew = m_SearchTable.Rows.Find(drNew[0]["idfSearchTableForUnionTable"]); if (rNew != null) { rNew.AcceptChanges(); } } int 
curUnionPartComplexity = 1; foreach (DataRow rTable in m_SearchTable.Select(string.Format("idfUnionSearchTable = {0} ", unionSearchTableID))) { if ((Utils.IsEmpty(rTable["intTableCount"]) == false) && (rTable["intTableCount"] is int)) { curUnionPartComplexity = curUnionPartComplexity + (int) rTable["intTableCount"]; } } if (complexity < curUnionPartComplexity) { complexity = curUnionPartComplexity; } } return complexity; } public override bool ValidateData() { if (base.ValidateData() == false) { return false; } if ((baseDataSet == null) || (baseDataSet.Tables.Contains(Query_DB.TasQuery) == false)) { return true; } if (DefaultQueryNameExists(Utils.Str(baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["DefQueryName"]))) { MessageForm.Show(EidssMessages.Get("msgNoUniqueQueryName", "Query with that name already exists. Please modify it.")); return false; } if ((ModelUserContext.CurrentLanguage != Localizer.lngEn) && QueryNameExists(Utils.Str(baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["DefQueryName"]))) { MessageForm.Show(EidssMessages.Get("msgNoUniqueNatQueryName", "Query with that national name already exists. Please modify it.")); return false; } if (qsoRoot.ValidateQSOInfo() == false) { trlQuery.Select(); trlQuery.FocusedNode = trlQuery.Nodes[0]; qsoRoot.ShowQSOInfoError(); return false; } for (int aOrder = 1; aOrder <= m_ObjectList.Count; aOrder++) { QuerySearchObjectInfo qsoInfo = m_ObjectList.Item(aOrder); if (qsoInfo.ValidateQSOInfo() == false) { TreeListNode node = trlQuery.FindNodeByFieldValue("idfsSearchObject", qsoInfo.SearchObject); if (node != null) { trlQuery.Select(); trlQuery.FocusedNode = node; qsoInfo.ShowQSOInfoError(); return false; } } } int queryComplexity = GetQueryComplexity(); if (queryComplexity > 256) { MessageForm.Show( string.Format( EidssMessages.Get("msgQueryComplexity", "Complexity of query is greater than allowed value. 
Please remove {0} flexible form fields or change query."), EidssMessages.Get("msgSeveral", "several"))); return false; } return true; } #endregion #region CreateFunction private void QueryDetail_OnAfterPost(object sender, EventArgs e) { m_QueryDbService.CreateFunction(); m_Modified = true; } #endregion private void navBarControl1_GroupCollapsed(object sender, NavBarGroupEventArgs e) { int height = 0; foreach (NavBarGroup group in navBarControl1.Groups) { height += BaseAvrPresenter.NavBarGroupHeaderHeight; if (group.Expanded) { height += group.ControlContainer.Height; } } navBarControl1.ClientSize = new Size(navBarControl1.ClientSize.Width, height); } private void cmdClose_Click(object sender, EventArgs e) { var baseForm = ParentObject as BaseForm; if (baseForm != null) { baseForm.SetClosingMode(ClosingMode.Cancel); } ProcessAfterPost(() => { Hide(); RaiseSendCommand(new QueryLayoutCloseCommand(sender)); }); } private void cmdSave_Click(object sender, EventArgs e) { ProcessAfterPost(null); } private void cmdNew_Click(object sender, EventArgs e) { ProcessAfterPost(() => { State = BusinessObjectState.NewObject | BusinessObjectState.IntermediateObject; ReadOnly = false; object id = null; LoadData(ref id); m_IsNewOrCopy = true; }); } private void ProcessAfterPost(Action handler) { if (LockHandler()) { try { if (QueryPost()) { if (handler != null) { handler(); } } else { SelectLastFocusedControl(); } } finally { UnlockHandler(); } } } private void cmdDelete_Click(object sender, EventArgs e) { object key = GetKey(); if (!(key is long)) { throw new AvrException(string.Format("QueryDetail key {0} is not long", key)); } RaiseSendCommand(new QueryLayoutDeleteCommand(sender, (long) key, AvrTreeElementType.Query)); } private void cmdCancelChanges_Click(object sender, EventArgs e) { DataEventManager.Flush(); if ((HasChanges() == false) && ((m_QueryDbService.IsNewObject == false) || IsBlankQuery())) { return; } string msg = EidssMessages.Get("msgConfirmCancelChangesForm", "Return the form to the last saved state?"); if (m_QueryDbService.IsNewObject && m_ShowClearFormPrompt) { msg = EidssMessages.Get("msgConfirmClearForm", "Clear the form content?"); } if (m_ShowClearFormPrompt && MessageForm.Show(msg, "", MessageBoxButtons.YesNo) != DialogResult.Yes) { SelectLastFocusedControl(); return; } if (m_ObjectList != null) { for (int i = Children.Count - 1; i >= 0; i--) { IRelatedObject qso = Children[i]; if (qso is QuerySearchObjectInfo) { var qsoInfo = qso as QuerySearchObjectInfo; if (m_ObjectList.Contains(qsoInfo)) { UnRegisterChildObject(qsoInfo); } } } m_ObjectList.Clear(); } if (m_QueryDbService.IsNewObject) { object id = null; LoadData(ref id); } else { object id = m_QueryDbService.ID; LoadData(ref id); } DefineBinding(); } private void cmdCopy_Click(object sender, EventArgs e) { if (QueryPost()) { m_QueryDbService.Copy(baseDataSet, null); baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["DefQueryName"] = GetCopyDefaultQueryName(baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["DefQueryName"]); if (ModelUserContext.CurrentLanguage != Localizer.lngEn) { baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["QueryName"] = GetCopyQueryName(baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["QueryName"]); } else { baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["QueryName"] = baseDataSet.Tables[Query_DB.TasQuery].Rows[0]["DefQueryName"]; } long qsoRootID = -1L; qsoRoot.Copy(qsoRootID, (long) m_QueryDbService.ID); DataRow rRoot = GetObjectRowBySearchObject(qsoRoot.SearchObject); if ((rRoot != null) && 
Utils.IsEmpty(rRoot["idfParentQuerySearchObject"])) { rRoot["idfQuerySearchObject"] = qsoRootID; rRoot["idflQuery"] = m_QueryDbService.ID; } qsoRoot.baseDataSet.AcceptChanges(); for (int aOrder = 1; aOrder <= m_ObjectList.Count; aOrder++) { QuerySearchObjectInfo qsoInfo = m_ObjectList.Item(aOrder); qsoInfo.Copy((-aOrder - 1), (long) m_QueryDbService.ID); DataRow rChild = GetObjectRowBySearchObject(qsoInfo.SearchObject); if ((rChild != null) && (Utils.IsEmpty(rChild["idfParentQuerySearchObject"]) == false)) { rChild["idfQuerySearchObject"] = (long) (-aOrder - 1); rChild["idfParentQuerySearchObject"] = qsoRootID; rChild["idflQuery"] = m_QueryDbService.ID; } qsoInfo.baseDataSet.AcceptChanges(); } baseDataSet.AcceptChanges(); m_IsNewOrCopy = true; if (ReadOnly != m_ReadOnlyMode) { ReadOnly = m_ReadOnlyMode; } } } private void RaiseRefreshCaption() { string queryName = IsNationalLanguage ? txtQueryName.Text : txtDefQueryName.Text; RaiseSendCommand(new RefreshCaptionCommand(this, queryName)); } } }
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.1.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace ClientApi.Router { using Microsoft.Rest; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.IO; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// Request operations. /// </summary> public partial class Request : IServiceOperations<PubSubClientApi>, IRequest { /// <summary> /// Initializes a new instance of the Request class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public Request(PubSubClientApi client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the PubSubClientApi /// </summary> public PubSubClientApi Client { get; private set; } /// <param name='tenantId'> /// </param> /// <param name='topicName'> /// </param> /// <param name='message'> /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="HttpOperationException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<HttpOperationResponse<object>> PostWithHttpMessagesAsync(string tenantId, string topicName, string message, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (tenantId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "tenantId"); } if (topicName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "topicName"); } if (message == null) { throw new ValidationException(ValidationRules.CannotBeNull, "message"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("tenantId", tenantId); tracingParameters.Add("topicName", topicName); tracingParameters.Add("message", message); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Post", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "api/Request/{tenantId}/{topicName}").ToString(); _url = _url.Replace("{tenantId}", System.Uri.EscapeDataString(tenantId)); _url = _url.Replace("{topicName}", System.Uri.EscapeDataString(topicName)); List<string> _queryParameters = new List<string>(); if (message != null) { _queryParameters.Add(string.Format("message={0}", System.Uri.EscapeDataString(message))); } if (_queryParameters.Count > 0) { _url += "?" + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("POST"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new HttpOperationException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); if (_httpResponse.Content != null) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); } else { _responseContent = string.Empty; } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new HttpOperationResponse<object>(); _result.Request = _httpRequest; _result.Response = _httpResponse; // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<object>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <param name='tenantId'> /// </param> /// <param name='topicName'> /// </param> /// <param name='subscriberName'> /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="HttpOperationException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<HttpOperationResponse<object>> GetWithHttpMessagesAsync(string tenantId, string topicName, string subscriberName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (tenantId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "tenantId"); } if (topicName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "topicName"); } if (subscriberName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "subscriberName"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("tenantId", tenantId); tracingParameters.Add("topicName", topicName); tracingParameters.Add("subscriberName", subscriberName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "api/Request/{tenantId}/{topicName}/{subscriberName}").ToString(); _url = _url.Replace("{tenantId}", System.Uri.EscapeDataString(tenantId)); _url = _url.Replace("{topicName}", System.Uri.EscapeDataString(topicName)); _url = _url.Replace("{subscriberName}", System.Uri.EscapeDataString(subscriberName)); // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new HttpOperationException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); if (_httpResponse.Content != null) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); } else { _responseContent = string.Empty; } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new HttpOperationResponse<object>(); _result.Request = _httpRequest; _result.Response = _httpResponse; // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<object>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on // an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by google-apis-code-generator 1.5.1 // C# generator version: 1.10.0 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ /** * \brief * APIs Discovery Service Version v1 * * \section ApiInfo API Version Information * <table> * <tr><th>API * <td><a href='https://developers.google.com/discovery/'>APIs Discovery Service</a> * <tr><th>API Version<td>v1 * <tr><th>API Rev<td>snapshot (0) * <tr><th>API Docs * <td><a href='https://developers.google.com/discovery/'> * https://developers.google.com/discovery/</a> * <tr><th>Discovery Name<td>discovery * </table> * * \section ForMoreInfo For More Information * * The complete API documentation for using APIs Discovery Service can be found at * <a href='https://developers.google.com/discovery/'>https://developers.google.com/discovery/</a>. * * For more information about the Google APIs Client Library for .NET, see * <a href='https://developers.google.com/api-client-library/dotnet/get_started'> * https://developers.google.com/api-client-library/dotnet/get_started</a> */ namespace Google.Apis.Discovery.v1 { /// <summary>The Discovery Service.</summary> public class DiscoveryService : Google.Apis.Services.BaseClientService { /// <summary>The API version.</summary> public const string Version = "v1"; /// <summary>The discovery version used to generate this service.</summary> public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0; /// <summary>Constructs a new service.</summary> public DiscoveryService() : this(new Google.Apis.Services.BaseClientService.Initializer()) {} /// <summary>Constructs a new service.</summary> /// <param name="initializer">The service initializer.</param> public DiscoveryService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer) { apis = new ApisResource(this); } /// <summary>Gets the service supported features.</summary> public override System.Collections.Generic.IList<string> Features { get { return new string[0]; } } /// <summary>Gets the service name.</summary> public override string Name { get { return "discovery"; } } /// <summary>Gets the service base URI.</summary> public override string BaseUri { get { return "https://www.googleapis.com/discovery/v1/"; } } /// <summary>Gets the service base path.</summary> public override string BasePath { get { return "discovery/v1/"; } } private readonly ApisResource apis; /// <summary>Gets the Apis resource.</summary> public virtual ApisResource Apis { get { return apis; } } } ///<summary>A base abstract class for Discovery requests.</summary> public abstract class DiscoveryBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse> { 
///<summary>Constructs a new DiscoveryBaseServiceRequest instance.</summary> protected DiscoveryBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service) { } /// <summary>Data format for the response.</summary> /// [default: json] [Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<AltEnum> Alt { get; set; } /// <summary>Data format for the response.</summary> public enum AltEnum { /// <summary>Responses with Content-Type of application/json</summary> [Google.Apis.Util.StringValueAttribute("json")] Json, } /// <summary>Selector specifying which fields to include in a partial response.</summary> [Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)] public virtual string Fields { get; set; } /// <summary>API key. Your API key identifies your project and provides you with API access, quota, and reports. /// Required unless you provide an OAuth 2.0 token.</summary> [Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)] public virtual string Key { get; set; } /// <summary>OAuth 2.0 token for the current user.</summary> [Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string OauthToken { get; set; } /// <summary>Returns response with indentations and line breaks.</summary> /// [default: true] [Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<bool> PrettyPrint { get; set; } /// <summary>Available to use for quota purposes for server-side applications. Can be any arbitrary string /// assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.</summary> [Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)] public virtual string QuotaUser { get; set; } /// <summary>IP address of the site where the request originates. 
Use this if you want to enforce per-user /// limits.</summary> [Google.Apis.Util.RequestParameterAttribute("userIp", Google.Apis.Util.RequestParameterType.Query)] public virtual string UserIp { get; set; } /// <summary>Initializes Discovery parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add( "alt", new Google.Apis.Discovery.Parameter { Name = "alt", IsRequired = false, ParameterType = "query", DefaultValue = "json", Pattern = null, }); RequestParameters.Add( "fields", new Google.Apis.Discovery.Parameter { Name = "fields", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "key", new Google.Apis.Discovery.Parameter { Name = "key", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "oauth_token", new Google.Apis.Discovery.Parameter { Name = "oauth_token", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "prettyPrint", new Google.Apis.Discovery.Parameter { Name = "prettyPrint", IsRequired = false, ParameterType = "query", DefaultValue = "true", Pattern = null, }); RequestParameters.Add( "quotaUser", new Google.Apis.Discovery.Parameter { Name = "quotaUser", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "userIp", new Google.Apis.Discovery.Parameter { Name = "userIp", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); } } /// <summary>The "apis" collection of methods.</summary> public class ApisResource { private const string Resource = "apis"; /// <summary>The service which this resource belongs to.</summary> private readonly Google.Apis.Services.IClientService service; /// <summary>Constructs a new resource.</summary> public ApisResource(Google.Apis.Services.IClientService service) { this.service = service; } /// <summary>Retrieve the description of a particular version of an api.</summary> /// <param name="api">The name of the API.</param> /// <param name="version">The version of the API.</param> public virtual GetRestRequest GetRest(string api, string version) { return new GetRestRequest(service, api, version); } /// <summary>Retrieve the description of a particular version of an api.</summary> public class GetRestRequest : DiscoveryBaseServiceRequest<Google.Apis.Discovery.v1.Data.RestDescription> { /// <summary>Constructs a new GetRest request.</summary> public GetRestRequest(Google.Apis.Services.IClientService service, string api, string version) : base(service) { Api = api; Version = version; InitParameters(); } /// <summary>The name of the API.</summary> [Google.Apis.Util.RequestParameterAttribute("api", Google.Apis.Util.RequestParameterType.Path)] public virtual string Api { get; private set; } /// <summary>The version of the API.</summary> [Google.Apis.Util.RequestParameterAttribute("version", Google.Apis.Util.RequestParameterType.Path)] public virtual string Version { get; private set; } ///<summary>Gets the method name.</summary> public override string MethodName { get { return "getRest"; } } ///<summary>Gets the HTTP method.</summary> public override string HttpMethod { get { return "GET"; } } ///<summary>Gets the REST path.</summary> public override string RestPath { get { return "apis/{api}/{version}/rest"; } } /// <summary>Initializes GetRest parameter list.</summary> protected override void InitParameters() { base.InitParameters(); 
RequestParameters.Add( "api", new Google.Apis.Discovery.Parameter { Name = "api", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "version", new Google.Apis.Discovery.Parameter { Name = "version", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); } } /// <summary>Retrieve the list of APIs supported at this endpoint.</summary> public virtual ListRequest List() { return new ListRequest(service); } /// <summary>Retrieve the list of APIs supported at this endpoint.</summary> public class ListRequest : DiscoveryBaseServiceRequest<Google.Apis.Discovery.v1.Data.DirectoryList> { /// <summary>Constructs a new List request.</summary> public ListRequest(Google.Apis.Services.IClientService service) : base(service) { InitParameters(); } /// <summary>Only include APIs with the given name.</summary> [Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Query)] public virtual string Name { get; set; } /// <summary>Return only the preferred version of an API.</summary> /// [default: false] [Google.Apis.Util.RequestParameterAttribute("preferred", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<bool> Preferred { get; set; } ///<summary>Gets the method name.</summary> public override string MethodName { get { return "list"; } } ///<summary>Gets the HTTP method.</summary> public override string HttpMethod { get { return "GET"; } } ///<summary>Gets the REST path.</summary> public override string RestPath { get { return "apis"; } } /// <summary>Initializes List parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add( "name", new Google.Apis.Discovery.Parameter { Name = "name", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "preferred", new Google.Apis.Discovery.Parameter { Name = "preferred", IsRequired = false, ParameterType = "query", DefaultValue = "false", Pattern = null, }); } } } } namespace Google.Apis.Discovery.v1.Data { public class DirectoryList : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Indicate the version of the Discovery API used to generate this doc.</summary> [Newtonsoft.Json.JsonPropertyAttribute("discoveryVersion")] public virtual string DiscoveryVersion { get; set; } /// <summary>The individual directory entries. 
One entry per api/version pair.</summary> [Newtonsoft.Json.JsonPropertyAttribute("items")] public virtual System.Collections.Generic.IList<DirectoryList.ItemsData> Items { get; set; } /// <summary>The kind for this response.</summary> [Newtonsoft.Json.JsonPropertyAttribute("kind")] public virtual string Kind { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } public class ItemsData { /// <summary>The description of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("description")] public virtual string Description { get; set; } /// <summary>A link to the discovery document.</summary> [Newtonsoft.Json.JsonPropertyAttribute("discoveryLink")] public virtual string DiscoveryLink { get; set; } /// <summary>The URL for the discovery REST document.</summary> [Newtonsoft.Json.JsonPropertyAttribute("discoveryRestUrl")] public virtual string DiscoveryRestUrl { get; set; } /// <summary>A link to human readable documentation for the API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("documentationLink")] public virtual string DocumentationLink { get; set; } /// <summary>Links to 16x16 and 32x32 icons representing the API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("icons")] public virtual ItemsData.IconsData Icons { get; set; } /// <summary>The id of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("id")] public virtual string Id { get; set; } /// <summary>The kind for this response.</summary> [Newtonsoft.Json.JsonPropertyAttribute("kind")] public virtual string Kind { get; set; } /// <summary>Labels for the status of this API, such as labs or deprecated.</summary> [Newtonsoft.Json.JsonPropertyAttribute("labels")] public virtual System.Collections.Generic.IList<string> Labels { get; set; } /// <summary>The name of the API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("name")] public virtual string Name { get; set; } /// <summary>True if this version is the preferred version to use.</summary> [Newtonsoft.Json.JsonPropertyAttribute("preferred")] public virtual System.Nullable<bool> Preferred { get; set; } /// <summary>The title of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("title")] public virtual string Title { get; set; } /// <summary>The version of the API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("version")] public virtual string Version { get; set; } /// <summary>Links to 16x16 and 32x32 icons representing the API.</summary> public class IconsData { /// <summary>The URL of the 16x16 icon.</summary> [Newtonsoft.Json.JsonPropertyAttribute("x16")] public virtual string X16 { get; set; } /// <summary>The URL of the 32x32 icon.</summary> [Newtonsoft.Json.JsonPropertyAttribute("x32")] public virtual string X32 { get; set; } } } } public class JsonSchema : Google.Apis.Requests.IDirectResponseSchema { /// <summary>A reference to another schema. 
The value of this property is the "id" of another schema.</summary> [Newtonsoft.Json.JsonPropertyAttribute("$ref")] public virtual string Ref__ { get; set; } /// <summary>If this is a schema for an object, this property is the schema for any additional properties with /// dynamic keys on this object.</summary> [Newtonsoft.Json.JsonPropertyAttribute("additionalProperties")] public virtual JsonSchema AdditionalProperties { get; set; } /// <summary>Additional information about this property.</summary> [Newtonsoft.Json.JsonPropertyAttribute("annotations")] public virtual JsonSchema.AnnotationsData Annotations { get; set; } /// <summary>The default value of this property (if one exists).</summary> [Newtonsoft.Json.JsonPropertyAttribute("default")] public virtual string Default__ { get; set; } /// <summary>A description of this object.</summary> [Newtonsoft.Json.JsonPropertyAttribute("description")] public virtual string Description { get; set; } /// <summary>Values this parameter may take (if it is an enum).</summary> [Newtonsoft.Json.JsonPropertyAttribute("enum")] public virtual System.Collections.Generic.IList<string> Enum__ { get; set; } /// <summary>The descriptions for the enums. Each position maps to the corresponding value in the "enum" /// array.</summary> [Newtonsoft.Json.JsonPropertyAttribute("enumDescriptions")] public virtual System.Collections.Generic.IList<string> EnumDescriptions { get; set; } /// <summary>An additional regular expression or key that helps constrain the value. For more details see: /// http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.23</summary> [Newtonsoft.Json.JsonPropertyAttribute("format")] public virtual string Format { get; set; } /// <summary>Unique identifier for this schema.</summary> [Newtonsoft.Json.JsonPropertyAttribute("id")] public virtual string Id { get; set; } /// <summary>If this is a schema for an array, this property is the schema for each element in the /// array.</summary> [Newtonsoft.Json.JsonPropertyAttribute("items")] public virtual JsonSchema Items { get; set; } /// <summary>Whether this parameter goes in the query or the path for REST requests.</summary> [Newtonsoft.Json.JsonPropertyAttribute("location")] public virtual string Location { get; set; } /// <summary>The maximum value of this parameter.</summary> [Newtonsoft.Json.JsonPropertyAttribute("maximum")] public virtual string Maximum { get; set; } /// <summary>The minimum value of this parameter.</summary> [Newtonsoft.Json.JsonPropertyAttribute("minimum")] public virtual string Minimum { get; set; } /// <summary>The regular expression this parameter must conform to. Uses Java 6 regex format: /// http://docs.oracle.com/javase/6/docs/api/java/util/regex/Pattern.html</summary> [Newtonsoft.Json.JsonPropertyAttribute("pattern")] public virtual string Pattern { get; set; } /// <summary>If this is a schema for an object, list the schema for each property of this object.</summary> [Newtonsoft.Json.JsonPropertyAttribute("properties")] public virtual System.Collections.Generic.IDictionary<string,JsonSchema> Properties { get; set; } /// <summary>The value is read-only, generated by the service. The value cannot be modified by the client. 
If /// the value is included in a POST, PUT, or PATCH request, it is ignored by the service.</summary> [Newtonsoft.Json.JsonPropertyAttribute("readOnly")] public virtual System.Nullable<bool> ReadOnly__ { get; set; } /// <summary>Whether this parameter may appear multiple times.</summary> [Newtonsoft.Json.JsonPropertyAttribute("repeated")] public virtual System.Nullable<bool> Repeated { get; set; } /// <summary>Whether the parameter is required.</summary> [Newtonsoft.Json.JsonPropertyAttribute("required")] public virtual System.Nullable<bool> Required { get; set; } /// <summary>The value type for this schema. A list of values can be found here: http://tools.ietf.org/html /// /draft-zyp-json-schema-03#section-5.1</summary> [Newtonsoft.Json.JsonPropertyAttribute("type")] public virtual string Type { get; set; } /// <summary>In a variant data type, the value of one property is used to determine how to interpret the entire /// entity. Its value must exist in a map of discriminant values to schema names.</summary> [Newtonsoft.Json.JsonPropertyAttribute("variant")] public virtual JsonSchema.VariantData Variant { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } /// <summary>Additional information about this property.</summary> public class AnnotationsData { /// <summary>A list of methods for which this property is required on requests.</summary> [Newtonsoft.Json.JsonPropertyAttribute("required")] public virtual System.Collections.Generic.IList<string> Required { get; set; } } /// <summary>In a variant data type, the value of one property is used to determine how to interpret the entire /// entity. Its value must exist in a map of discriminant values to schema names.</summary> public class VariantData { /// <summary>The name of the type discriminant property.</summary> [Newtonsoft.Json.JsonPropertyAttribute("discriminant")] public virtual string Discriminant { get; set; } /// <summary>The map of discriminant value to schema to use for parsing.</summary> [Newtonsoft.Json.JsonPropertyAttribute("map")] public virtual System.Collections.Generic.IList<VariantData.MapData> Map { get; set; } public class MapData { [Newtonsoft.Json.JsonPropertyAttribute("$ref")] public virtual string Ref__ { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("type_value")] public virtual string TypeValue { get; set; } } } } public class RestDescription : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Authentication information.</summary> [Newtonsoft.Json.JsonPropertyAttribute("auth")] public virtual RestDescription.AuthData Auth { get; set; } /// <summary>[DEPRECATED] The base path for REST requests.</summary> [Newtonsoft.Json.JsonPropertyAttribute("basePath")] public virtual string BasePath { get; set; } /// <summary>[DEPRECATED] The base URL for REST requests.</summary> [Newtonsoft.Json.JsonPropertyAttribute("baseUrl")] public virtual string BaseUrl { get; set; } /// <summary>The path for REST batch requests.</summary> [Newtonsoft.Json.JsonPropertyAttribute("batchPath")] public virtual string BatchPath { get; set; } /// <summary>Indicates how the API name should be capitalized and split into various parts. 
Useful for /// generating pretty class names.</summary> [Newtonsoft.Json.JsonPropertyAttribute("canonicalName")] public virtual string CanonicalName { get; set; } /// <summary>The description of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("description")] public virtual string Description { get; set; } /// <summary>Indicate the version of the Discovery API used to generate this doc.</summary> [Newtonsoft.Json.JsonPropertyAttribute("discoveryVersion")] public virtual string DiscoveryVersion { get; set; } /// <summary>A link to human readable documentation for the API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("documentationLink")] public virtual string DocumentationLink { get; set; } /// <summary>The ETag for this response.</summary> [Newtonsoft.Json.JsonPropertyAttribute("etag")] public virtual string ETag { get; set; } /// <summary>Enable exponential backoff for suitable methods in the generated clients.</summary> [Newtonsoft.Json.JsonPropertyAttribute("exponentialBackoffDefault")] public virtual System.Nullable<bool> ExponentialBackoffDefault { get; set; } /// <summary>A list of supported features for this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("features")] public virtual System.Collections.Generic.IList<string> Features { get; set; } /// <summary>Links to 16x16 and 32x32 icons representing the API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("icons")] public virtual RestDescription.IconsData Icons { get; set; } /// <summary>The ID of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("id")] public virtual string Id { get; set; } /// <summary>The kind for this response.</summary> [Newtonsoft.Json.JsonPropertyAttribute("kind")] public virtual string Kind { get; set; } /// <summary>Labels for the status of this API, such as labs or deprecated.</summary> [Newtonsoft.Json.JsonPropertyAttribute("labels")] public virtual System.Collections.Generic.IList<string> Labels { get; set; } /// <summary>API-level methods for this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("methods")] public virtual System.Collections.Generic.IDictionary<string,RestMethod> Methods { get; set; } /// <summary>The name of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("name")] public virtual string Name { get; set; } /// <summary>The domain of the owner of this API. Together with the ownerName and a packagePath values, this can /// be used to generate a library for this API which would have a unique fully qualified name.</summary> [Newtonsoft.Json.JsonPropertyAttribute("ownerDomain")] public virtual string OwnerDomain { get; set; } /// <summary>The name of the owner of this API. See ownerDomain.</summary> [Newtonsoft.Json.JsonPropertyAttribute("ownerName")] public virtual string OwnerName { get; set; } /// <summary>The package of the owner of this API. 
See ownerDomain.</summary> [Newtonsoft.Json.JsonPropertyAttribute("packagePath")] public virtual string PackagePath { get; set; } /// <summary>Common parameters that apply across all apis.</summary> [Newtonsoft.Json.JsonPropertyAttribute("parameters")] public virtual System.Collections.Generic.IDictionary<string,JsonSchema> Parameters { get; set; } /// <summary>The protocol described by this document.</summary> [Newtonsoft.Json.JsonPropertyAttribute("protocol")] public virtual string Protocol { get; set; } /// <summary>The resources in this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("resources")] public virtual System.Collections.Generic.IDictionary<string,RestResource> Resources { get; set; } /// <summary>The version of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("revision")] public virtual string Revision { get; set; } /// <summary>The root URL under which all API services live.</summary> [Newtonsoft.Json.JsonPropertyAttribute("rootUrl")] public virtual string RootUrl { get; set; } /// <summary>The schemas for this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("schemas")] public virtual System.Collections.Generic.IDictionary<string,JsonSchema> Schemas { get; set; } /// <summary>The base path for all REST requests.</summary> [Newtonsoft.Json.JsonPropertyAttribute("servicePath")] public virtual string ServicePath { get; set; } /// <summary>The title of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("title")] public virtual string Title { get; set; } /// <summary>The version of this API.</summary> [Newtonsoft.Json.JsonPropertyAttribute("version")] public virtual string Version { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("version_module")] public virtual System.Nullable<bool> VersionModule { get; set; } /// <summary>Authentication information.</summary> public class AuthData { /// <summary>OAuth 2.0 authentication information.</summary> [Newtonsoft.Json.JsonPropertyAttribute("oauth2")] public virtual AuthData.Oauth2Data Oauth2 { get; set; } /// <summary>OAuth 2.0 authentication information.</summary> public class Oauth2Data { /// <summary>Available OAuth 2.0 scopes.</summary> [Newtonsoft.Json.JsonPropertyAttribute("scopes")] public virtual System.Collections.Generic.IDictionary<string,Oauth2Data.ScopesDataElement> Scopes { get; set; } /// <summary>The scope value.</summary> public class ScopesDataElement { /// <summary>Description of scope.</summary> [Newtonsoft.Json.JsonPropertyAttribute("description")] public virtual string Description { get; set; } } } } /// <summary>Links to 16x16 and 32x32 icons representing the API.</summary> public class IconsData { /// <summary>The URL of the 16x16 icon.</summary> [Newtonsoft.Json.JsonPropertyAttribute("x16")] public virtual string X16 { get; set; } /// <summary>The URL of the 32x32 icon.</summary> [Newtonsoft.Json.JsonPropertyAttribute("x32")] public virtual string X32 { get; set; } } } public class RestMethod : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Description of this method.</summary> [Newtonsoft.Json.JsonPropertyAttribute("description")] public virtual string Description { get; set; } /// <summary>Whether this method requires an ETag to be specified. 
The ETag is sent as an HTTP If-Match or If- /// None-Match header.</summary> [Newtonsoft.Json.JsonPropertyAttribute("etagRequired")] public virtual System.Nullable<bool> EtagRequired { get; set; } /// <summary>HTTP method used by this method.</summary> [Newtonsoft.Json.JsonPropertyAttribute("httpMethod")] public virtual string HttpMethod { get; set; } /// <summary>A unique ID for this method. This property can be used to match methods between different versions /// of Discovery.</summary> [Newtonsoft.Json.JsonPropertyAttribute("id")] public virtual string Id { get; set; } /// <summary>Media upload parameters.</summary> [Newtonsoft.Json.JsonPropertyAttribute("mediaUpload")] public virtual RestMethod.MediaUploadData MediaUpload { get; set; } /// <summary>Ordered list of required parameters, serves as a hint to clients on how to structure their method /// signatures. The array is ordered such that the "most-significant" parameter appears first.</summary> [Newtonsoft.Json.JsonPropertyAttribute("parameterOrder")] public virtual System.Collections.Generic.IList<string> ParameterOrder { get; set; } /// <summary>Details for all parameters in this method.</summary> [Newtonsoft.Json.JsonPropertyAttribute("parameters")] public virtual System.Collections.Generic.IDictionary<string,JsonSchema> Parameters { get; set; } /// <summary>The URI path of this REST method. Should be used in conjunction with the basePath property at the /// api-level.</summary> [Newtonsoft.Json.JsonPropertyAttribute("path")] public virtual string Path { get; set; } /// <summary>The schema for the request.</summary> [Newtonsoft.Json.JsonPropertyAttribute("request")] public virtual RestMethod.RequestData Request { get; set; } /// <summary>The schema for the response.</summary> [Newtonsoft.Json.JsonPropertyAttribute("response")] public virtual RestMethod.ResponseData Response { get; set; } /// <summary>OAuth 2.0 scopes applicable to this method.</summary> [Newtonsoft.Json.JsonPropertyAttribute("scopes")] public virtual System.Collections.Generic.IList<string> Scopes { get; set; } /// <summary>Whether this method supports media downloads.</summary> [Newtonsoft.Json.JsonPropertyAttribute("supportsMediaDownload")] public virtual System.Nullable<bool> SupportsMediaDownload { get; set; } /// <summary>Whether this method supports media uploads.</summary> [Newtonsoft.Json.JsonPropertyAttribute("supportsMediaUpload")] public virtual System.Nullable<bool> SupportsMediaUpload { get; set; } /// <summary>Whether this method supports subscriptions.</summary> [Newtonsoft.Json.JsonPropertyAttribute("supportsSubscription")] public virtual System.Nullable<bool> SupportsSubscription { get; set; } /// <summary>Indicates that downloads from this method should use the download service URL (i.e. "/download"). 
/// Only applies if the method supports media download.</summary> [Newtonsoft.Json.JsonPropertyAttribute("useMediaDownloadService")] public virtual System.Nullable<bool> UseMediaDownloadService { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } /// <summary>Media upload parameters.</summary> public class MediaUploadData { /// <summary>MIME Media Ranges for acceptable media uploads to this method.</summary> [Newtonsoft.Json.JsonPropertyAttribute("accept")] public virtual System.Collections.Generic.IList<string> Accept { get; set; } /// <summary>Maximum size of a media upload, such as "1MB", "2GB" or "3TB".</summary> [Newtonsoft.Json.JsonPropertyAttribute("maxSize")] public virtual string MaxSize { get; set; } /// <summary>Supported upload protocols.</summary> [Newtonsoft.Json.JsonPropertyAttribute("protocols")] public virtual MediaUploadData.ProtocolsData Protocols { get; set; } /// <summary>Supported upload protocols.</summary> public class ProtocolsData { /// <summary>Supports the Resumable Media Upload protocol.</summary> [Newtonsoft.Json.JsonPropertyAttribute("resumable")] public virtual ProtocolsData.ResumableData Resumable { get; set; } /// <summary>Supports uploading as a single HTTP request.</summary> [Newtonsoft.Json.JsonPropertyAttribute("simple")] public virtual ProtocolsData.SimpleData Simple { get; set; } /// <summary>Supports the Resumable Media Upload protocol.</summary> public class ResumableData { /// <summary>True if this endpoint supports uploading multipart media.</summary> [Newtonsoft.Json.JsonPropertyAttribute("multipart")] public virtual System.Nullable<bool> Multipart { get; set; } /// <summary>The URI path to be used for upload. Should be used in conjunction with the basePath /// property at the api-level.</summary> [Newtonsoft.Json.JsonPropertyAttribute("path")] public virtual string Path { get; set; } } /// <summary>Supports uploading as a single HTTP request.</summary> public class SimpleData { /// <summary>True if this endpoint supports upload multipart media.</summary> [Newtonsoft.Json.JsonPropertyAttribute("multipart")] public virtual System.Nullable<bool> Multipart { get; set; } /// <summary>The URI path to be used for upload. 
Should be used in conjunction with the basePath /// property at the api-level.</summary> [Newtonsoft.Json.JsonPropertyAttribute("path")] public virtual string Path { get; set; } } } } /// <summary>The schema for the request.</summary> public class RequestData { /// <summary>Schema ID for the request schema.</summary> [Newtonsoft.Json.JsonPropertyAttribute("$ref")] public virtual string Ref__ { get; set; } /// <summary>parameter name.</summary> [Newtonsoft.Json.JsonPropertyAttribute("parameterName")] public virtual string ParameterName { get; set; } } /// <summary>The schema for the response.</summary> public class ResponseData { /// <summary>Schema ID for the response schema.</summary> [Newtonsoft.Json.JsonPropertyAttribute("$ref")] public virtual string Ref__ { get; set; } } } public class RestResource : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Methods on this resource.</summary> [Newtonsoft.Json.JsonPropertyAttribute("methods")] public virtual System.Collections.Generic.IDictionary<string,RestMethod> Methods { get; set; } /// <summary>Sub-resources on this resource.</summary> [Newtonsoft.Json.JsonPropertyAttribute("resources")] public virtual System.Collections.Generic.IDictionary<string,RestResource> Resources { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } }
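// Illustrative usage sketch (not part of the generated Discovery client): shows how the
// ListRequest and DirectoryList types defined above might be used. It assumes the enclosing
// DiscoveryService and ApisResource types generated earlier in this file, plus the
// Google.Apis.Services.BaseClientService initializer from the core client library; the
// sample namespace and class names are hypothetical.
namespace Google.Apis.Discovery.v1.Samples
{
    public static class DirectoryListSample
    {
        public static void PrintPreferredApis()
        {
            // Create the generated service with default settings; the Discovery
            // directory endpoint can be queried without OAuth credentials.
            var service = new Google.Apis.Discovery.v1.DiscoveryService(
                new Google.Apis.Services.BaseClientService.Initializer
                {
                    ApplicationName = "discovery-directory-sample"
                });

            // Build the request via the generated resource and set the optional
            // query parameters declared above ("name", "preferred").
            var request = service.Apis.List();
            request.Preferred = true;

            // Execute synchronously; ExecuteAsync() is also available on the base request type.
            Google.Apis.Discovery.v1.Data.DirectoryList directory = request.Execute();
            if (directory.Items == null)
            {
                return;
            }
            foreach (var item in directory.Items)
            {
                System.Console.WriteLine("{0} {1} -> {2}", item.Name, item.Version, item.DiscoveryRestUrl);
            }
        }
    }
}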
namespace java.net { [global::MonoJavaBridge.JavaClass(typeof(global::java.net.DatagramSocketImpl_))] public abstract partial class DatagramSocketImpl : java.lang.Object, SocketOptions { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static DatagramSocketImpl() { InitJNI(); } protected DatagramSocketImpl(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } internal static global::MonoJavaBridge.MethodId _getOption13638; public abstract global::java.lang.Object getOption(int arg0); internal static global::MonoJavaBridge.MethodId _setOption13639; public abstract void setOption(int arg0, java.lang.Object arg1); internal static global::MonoJavaBridge.MethodId _join13640; protected abstract void join(java.net.InetAddress arg0); internal static global::MonoJavaBridge.MethodId _close13641; protected abstract void close(); internal static global::MonoJavaBridge.MethodId _peek13642; protected abstract int peek(java.net.InetAddress arg0); internal static global::MonoJavaBridge.MethodId _send13643; protected abstract void send(java.net.DatagramPacket arg0); internal static global::MonoJavaBridge.MethodId _create13644; protected abstract void create(); internal static global::MonoJavaBridge.MethodId _connect13645; protected virtual void connect(java.net.InetAddress arg0, int arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl._connect13645, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl.staticClass, global::java.net.DatagramSocketImpl._connect13645, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } internal static global::MonoJavaBridge.MethodId _peekData13646; protected abstract int peekData(java.net.DatagramPacket arg0); internal static global::MonoJavaBridge.MethodId _getFileDescriptor13647; protected virtual global::java.io.FileDescriptor getFileDescriptor() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.net.DatagramSocketImpl._getFileDescriptor13647)) as java.io.FileDescriptor; else return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.net.DatagramSocketImpl.staticClass, global::java.net.DatagramSocketImpl._getFileDescriptor13647)) as java.io.FileDescriptor; } internal static global::MonoJavaBridge.MethodId _bind13648; protected abstract void bind(int arg0, java.net.InetAddress arg1); internal static global::MonoJavaBridge.MethodId _disconnect13649; protected virtual void disconnect() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl._disconnect13649); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl.staticClass, global::java.net.DatagramSocketImpl._disconnect13649); } internal static global::MonoJavaBridge.MethodId _getLocalPort13650; protected virtual int getLocalPort() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallIntMethod(this.JvmHandle, 
global::java.net.DatagramSocketImpl._getLocalPort13650); else return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.net.DatagramSocketImpl.staticClass, global::java.net.DatagramSocketImpl._getLocalPort13650); } internal static global::MonoJavaBridge.MethodId _receive13651; protected abstract void receive(java.net.DatagramPacket arg0); internal static global::MonoJavaBridge.MethodId _setTTL13652; protected abstract void setTTL(byte arg0); internal static global::MonoJavaBridge.MethodId _getTTL13653; protected abstract byte getTTL(); internal static global::MonoJavaBridge.MethodId _setTimeToLive13654; protected abstract void setTimeToLive(int arg0); internal static global::MonoJavaBridge.MethodId _getTimeToLive13655; protected abstract int getTimeToLive(); internal static global::MonoJavaBridge.MethodId _leave13656; protected abstract void leave(java.net.InetAddress arg0); internal static global::MonoJavaBridge.MethodId _joinGroup13657; protected abstract void joinGroup(java.net.SocketAddress arg0, java.net.NetworkInterface arg1); internal static global::MonoJavaBridge.MethodId _leaveGroup13658; protected abstract void leaveGroup(java.net.SocketAddress arg0, java.net.NetworkInterface arg1); internal static global::MonoJavaBridge.MethodId _DatagramSocketImpl13659; public DatagramSocketImpl() : base(global::MonoJavaBridge.JNIEnv.ThreadEnv) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(java.net.DatagramSocketImpl.staticClass, global::java.net.DatagramSocketImpl._DatagramSocketImpl13659); Init(@__env, handle); } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::java.net.DatagramSocketImpl.staticClass = @__env.NewGlobalRef(@__env.FindClass("java/net/DatagramSocketImpl")); global::java.net.DatagramSocketImpl._getOption13638 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "getOption", "(I)Ljava/lang/Object;"); global::java.net.DatagramSocketImpl._setOption13639 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "setOption", "(ILjava/lang/Object;)V"); global::java.net.DatagramSocketImpl._join13640 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "join", "(Ljava/net/InetAddress;)V"); global::java.net.DatagramSocketImpl._close13641 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "close", "()V"); global::java.net.DatagramSocketImpl._peek13642 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "peek", "(Ljava/net/InetAddress;)I"); global::java.net.DatagramSocketImpl._send13643 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "send", "(Ljava/net/DatagramPacket;)V"); global::java.net.DatagramSocketImpl._create13644 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "create", "()V"); global::java.net.DatagramSocketImpl._connect13645 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "connect", "(Ljava/net/InetAddress;I)V"); global::java.net.DatagramSocketImpl._peekData13646 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "peekData", "(Ljava/net/DatagramPacket;)I"); global::java.net.DatagramSocketImpl._getFileDescriptor13647 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "getFileDescriptor", "()Ljava/io/FileDescriptor;"); 
global::java.net.DatagramSocketImpl._bind13648 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "bind", "(ILjava/net/InetAddress;)V"); global::java.net.DatagramSocketImpl._disconnect13649 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "disconnect", "()V"); global::java.net.DatagramSocketImpl._getLocalPort13650 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "getLocalPort", "()I"); global::java.net.DatagramSocketImpl._receive13651 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "receive", "(Ljava/net/DatagramPacket;)V"); global::java.net.DatagramSocketImpl._setTTL13652 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "setTTL", "(B)V"); global::java.net.DatagramSocketImpl._getTTL13653 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "getTTL", "()B"); global::java.net.DatagramSocketImpl._setTimeToLive13654 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "setTimeToLive", "(I)V"); global::java.net.DatagramSocketImpl._getTimeToLive13655 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "getTimeToLive", "()I"); global::java.net.DatagramSocketImpl._leave13656 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "leave", "(Ljava/net/InetAddress;)V"); global::java.net.DatagramSocketImpl._joinGroup13657 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "joinGroup", "(Ljava/net/SocketAddress;Ljava/net/NetworkInterface;)V"); global::java.net.DatagramSocketImpl._leaveGroup13658 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "leaveGroup", "(Ljava/net/SocketAddress;Ljava/net/NetworkInterface;)V"); global::java.net.DatagramSocketImpl._DatagramSocketImpl13659 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl.staticClass, "<init>", "()V"); } } [global::MonoJavaBridge.JavaProxy(typeof(global::java.net.DatagramSocketImpl))] public sealed partial class DatagramSocketImpl_ : java.net.DatagramSocketImpl { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static DatagramSocketImpl_() { InitJNI(); } internal DatagramSocketImpl_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } internal static global::MonoJavaBridge.MethodId _getOption13660; public override global::java.lang.Object getOption(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._getOption13660, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.lang.Object; else return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._getOption13660, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.lang.Object; } internal static global::MonoJavaBridge.MethodId _setOption13661; public override void setOption(int arg0, java.lang.Object arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._setOption13661, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else 
@__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._setOption13661, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } internal static global::MonoJavaBridge.MethodId _join13662; protected override void join(java.net.InetAddress arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._join13662, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._join13662, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _close13663; protected override void close() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._close13663); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._close13663); } internal static global::MonoJavaBridge.MethodId _peek13664; protected override int peek(java.net.InetAddress arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallIntMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._peek13664, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._peek13664, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _send13665; protected override void send(java.net.DatagramPacket arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._send13665, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._send13665, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _create13666; protected override void create() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._create13666); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._create13666); } internal static global::MonoJavaBridge.MethodId _peekData13667; protected override int peekData(java.net.DatagramPacket arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallIntMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._peekData13667, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._peekData13667, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _bind13668; protected override 
void bind(int arg0, java.net.InetAddress arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._bind13668, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._bind13668, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } internal static global::MonoJavaBridge.MethodId _receive13669; protected override void receive(java.net.DatagramPacket arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._receive13669, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._receive13669, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setTTL13670; protected override void setTTL(byte arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._setTTL13670, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._setTTL13670, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _getTTL13671; protected override byte getTTL() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallByteMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._getTTL13671); else return @__env.CallNonVirtualByteMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._getTTL13671); } internal static global::MonoJavaBridge.MethodId _setTimeToLive13672; protected override void setTimeToLive(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._setTimeToLive13672, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._setTimeToLive13672, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _getTimeToLive13673; protected override int getTimeToLive() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallIntMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._getTimeToLive13673); else return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._getTimeToLive13673); } internal static global::MonoJavaBridge.MethodId _leave13674; protected override void leave(java.net.InetAddress arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, 
global::java.net.DatagramSocketImpl_._leave13674, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._leave13674, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _joinGroup13675; protected override void joinGroup(java.net.SocketAddress arg0, java.net.NetworkInterface arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._joinGroup13675, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._joinGroup13675, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } internal static global::MonoJavaBridge.MethodId _leaveGroup13676; protected override void leaveGroup(java.net.SocketAddress arg0, java.net.NetworkInterface arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_._leaveGroup13676, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.net.DatagramSocketImpl_.staticClass, global::java.net.DatagramSocketImpl_._leaveGroup13676, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::java.net.DatagramSocketImpl_.staticClass = @__env.NewGlobalRef(@__env.FindClass("java/net/DatagramSocketImpl")); global::java.net.DatagramSocketImpl_._getOption13660 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "getOption", "(I)Ljava/lang/Object;"); global::java.net.DatagramSocketImpl_._setOption13661 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "setOption", "(ILjava/lang/Object;)V"); global::java.net.DatagramSocketImpl_._join13662 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "join", "(Ljava/net/InetAddress;)V"); global::java.net.DatagramSocketImpl_._close13663 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "close", "()V"); global::java.net.DatagramSocketImpl_._peek13664 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "peek", "(Ljava/net/InetAddress;)I"); global::java.net.DatagramSocketImpl_._send13665 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "send", "(Ljava/net/DatagramPacket;)V"); global::java.net.DatagramSocketImpl_._create13666 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "create", "()V"); global::java.net.DatagramSocketImpl_._peekData13667 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "peekData", "(Ljava/net/DatagramPacket;)I"); global::java.net.DatagramSocketImpl_._bind13668 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "bind", "(ILjava/net/InetAddress;)V"); global::java.net.DatagramSocketImpl_._receive13669 = 
@__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "receive", "(Ljava/net/DatagramPacket;)V"); global::java.net.DatagramSocketImpl_._setTTL13670 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "setTTL", "(B)V"); global::java.net.DatagramSocketImpl_._getTTL13671 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "getTTL", "()B"); global::java.net.DatagramSocketImpl_._setTimeToLive13672 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "setTimeToLive", "(I)V"); global::java.net.DatagramSocketImpl_._getTimeToLive13673 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "getTimeToLive", "()I"); global::java.net.DatagramSocketImpl_._leave13674 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "leave", "(Ljava/net/InetAddress;)V"); global::java.net.DatagramSocketImpl_._joinGroup13675 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "joinGroup", "(Ljava/net/SocketAddress;Ljava/net/NetworkInterface;)V"); global::java.net.DatagramSocketImpl_._leaveGroup13676 = @__env.GetMethodIDNoThrow(global::java.net.DatagramSocketImpl_.staticClass, "leaveGroup", "(Ljava/net/SocketAddress;Ljava/net/NetworkInterface;)V"); } } }
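// Editor's sketch (hypothetical, not generated by MonoJavaBridge): the bindings above all
// follow the same shape -- InitJNI() resolves the Java class once, pins it with a global
// reference, and caches a MethodId per member; each wrapper then fetches the thread-local
// JNIEnv and, based on IsClrObject, appears to choose between an ordinary virtual call and
// a non-virtual call against the cached class so a CLR-side override is not re-entered.
// The pattern is reduced here to a single method; the class name, target Java type, and
// method are illustrative only, and the inherited members (IsClrObject, JvmHandle) are
// assumed to be available to subclasses exactly as they are in the generated classes above.
namespace MonoJavaBridge.Samples
{
    public class StringLengthWrapper : java.lang.Object
    {
        internal static global::MonoJavaBridge.JniGlobalHandle staticClass;
        internal static global::MonoJavaBridge.MethodId _length;

        static StringLengthWrapper()
        {
            // Resolve the Java class once and cache the method id, mirroring InitJNI().
            global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
            staticClass = @__env.NewGlobalRef(@__env.FindClass("java/lang/String"));
            _length = @__env.GetMethodIDNoThrow(staticClass, "length", "()I");
        }

        internal StringLengthWrapper(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { }

        public virtual int length()
        {
            global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
            // Pure Java instances get a normal virtual call; CLR-backed instances are
            // routed through the non-virtual call against the cached class handle.
            if (!IsClrObject)
                return @__env.CallIntMethod(this.JvmHandle, _length);
            else
                return @__env.CallNonVirtualIntMethod(this.JvmHandle, staticClass, _length);
        }
    }
}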
/* * Rectangle.cs - Implementation of the "System.Drawing.Rectangle" class. * * Copyright (C) 2003 Southern Storm Software, Pty Ltd. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ namespace System.Drawing { using System.Runtime.InteropServices; using System.ComponentModel; #if !ECMA_COMPAT [Serializable] [ComVisible(true)] #endif #if CONFIG_COMPONENT_MODEL [TypeConverter("System.Drawing.RectangleConverter")] #endif public struct Rectangle { // Internal state. private int x; private int y; private int width; private int height; // The empty rectangle. public static readonly Rectangle Empty = new Rectangle(0, 0, 0, 0); // Constructors. public Rectangle(Point location, Size size) { x = location.X; y = location.Y; width = size.Width; height = size.Height; } public Rectangle(int x, int y, int width, int height) { this.x = x; this.y = y; this.width = width; this.height = height; } // Determine if this rectangle is empty. public bool IsEmpty { get { return (x == 0 && y == 0 && width == 0 && height == 0); } } // Get or set the X co-ordinate. public int X { get { return x; } set { x = value; } } // Get or set the Y co-ordinate. public int Y { get { return y; } set { y = value; } } // Get or set the width. public int Width { get { return width; } set { width = value; } } // Get or set the height. public int Height { get { return height; } set { height = value; } } // Get the bottom edge of the rectangle. public int Bottom { get { return y + height; } } // Get the left edge of the rectangle. public int Left { get { return x; } } // Get the right edge of the rectangle. public int Right { get { return x + width; } } // Get the top edge of the rectangle. public int Top { get { return y; } } // Get or set the location of the top-left corner. public Point Location { get { return new Point(x, y); } set { x = value.X; y = value.Y; } } // Get or set the size of the rectangle. public Size Size { get { return new Size(width, height); } set { width = value.Width; height = value.Height; } } #if CONFIG_EXTENDED_NUMERICS // Convert a RectangleF object into a Rectangle by ceiling conversion. public static Rectangle Ceiling(RectangleF value) { return new Rectangle((int)(Math.Ceiling(value.X)), (int)(Math.Ceiling(value.Y)), (int)(Math.Ceiling(value.Width)), (int)(Math.Ceiling(value.Height))); } #endif // Determine if a rectangle contains a point. public bool Contains(int x, int y) { return (x >= this.x && x < (this.x + this.width) && y >= this.y && y < (this.y + this.height)); } public bool Contains(Point pt) { return Contains(pt.X, pt.Y); } // Determine if one rectangle contains another. public bool Contains(Rectangle rect) { if(rect.x >= this.x && (rect.x + rect.width) <= (this.x + this.width) && rect.y >= this.y && (rect.y + rect.height) <= (this.y + this.height)) { return true; } else { return false; } } // Determine if two rectangles are equal. 
public override bool Equals(Object obj) { if(obj is Rectangle) { Rectangle other = (Rectangle)obj; return (x == other.x && y == other.y && width == other.width && height == other.height); } else { return false; } } // Convert left, top, right, bottom values into a rectangle. public static Rectangle FromLTRB(int left, int top, int right, int bottom) { return new Rectangle(left, top, right - left, bottom - top); } // Get a hash code for this object. public override int GetHashCode() { return (x ^ y ^ width ^ height); } // Inflate this rectangle. public void Inflate(int width, int height) { this.x -= width; this.y -= height; this.width += width * 2; this.height += height * 2; } public void Inflate(Size size) { Inflate(size.Width, size.Height); } // Inflate a specific rectangle without modifying it. public static Rectangle Inflate(Rectangle rect, int x, int y) { return new Rectangle(rect.x - x, rect.y - y, rect.width + x * 2, rect.height + y * 2); } // Form the intersection of another rectangle with this one. public void Intersect(Rectangle rect) { int left, top, right, bottom; left = x; if(left < rect.x) { left = rect.x; } top = y; if(top < rect.y) { top = rect.y; } right = x + width; if(right > (rect.x + rect.width)) { right = (rect.x + rect.width); } bottom = y + height; if(bottom > (rect.y + rect.height)) { bottom = (rect.y + rect.height); } if(left < right && top < bottom) { x = left; y = top; width = right - left; height = bottom - top; } else { x = 0; y = 0; width = 0; height = 0; } } // Form the intersection of two rectangles. public static Rectangle Intersect(Rectangle a, Rectangle b) { a.Intersect(b); return a; } // Determine if this rectangle intersects with another. public bool IntersectsWith(Rectangle rect) { return (rect.x < (x + width) && (rect.x + rect.width) >= x && rect.y < (y + height) && (rect.y + rect.height) >= y); } // Offset this rectangle by a point. public void Offset(int x, int y) { this.x += x; this.y += y; } public void Offset(Point pos) { this.x += pos.X; this.y += pos.Y; } #if CONFIG_EXTENDED_NUMERICS // Convert a RectangleF object into a Rectangle by rounding conversion. public static Rectangle Round(RectangleF value) { return new Rectangle((int)(Math.Round(value.X)), (int)(Math.Round(value.Y)), (int)(Math.Round(value.Width)), (int)(Math.Round(value.Height))); } // Convert this object into a string. public override String ToString() { return "{X=" + x.ToString() + ",Y=" + y.ToString() + ",Width=" + width.ToString() + ",Height=" + height.ToString() + "}"; } #endif // Convert a RectangleF object into a Rectangle by truncating conversion. public static Rectangle Truncate(RectangleF value) { return new Rectangle((int)(value.X), (int)(value.Y), (int)(value.Width), (int)(value.Height)); } // Get the union of two rectangles. public static Rectangle Union(Rectangle a, Rectangle b) { int left, top, right, bottom; left = a.x; if(left > b.x) { left = b.x; } top = a.y; if(top > b.y) { top = b.y; } right = a.x + a.width; if(right < (b.x + b.width)) { right = b.x + b.width; } bottom = a.y + a.height; if(bottom < (b.y + b.height)) { bottom = b.y + b.height; } return new Rectangle(left, top, right - left, bottom - top); } // Overloaded operators. 
	public static bool operator==(Rectangle left, Rectangle right)
			{
				return (left.x == right.x && left.y == right.y &&
						left.width == right.width &&
						left.height == right.height);
			}
	public static bool operator!=(Rectangle left, Rectangle right)
			{
				return (left.x != right.x || left.y != right.y ||
						left.width != right.width ||
						left.height != right.height);
			}

}; // struct Rectangle

}; // namespace System.Drawing
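/*
 * Illustrative sketch (not part of the original library source): exercises the
 * Rectangle API defined above. It assumes the companion Point and Size structs
 * from the same System.Drawing implementation; the members guarded by
 * CONFIG_EXTENDED_NUMERICS (Ceiling/Round) are deliberately not used, and the
 * sample namespace and class names are illustrative only.
 */
namespace System.Drawing.Samples
{
	public static class RectangleSample
	{
		public static void Run()
		{
			// Construct from x/y/width/height and from left/top/right/bottom.
			Rectangle a = new Rectangle(10, 10, 100, 50);
			Rectangle b = Rectangle.FromLTRB(60, 20, 200, 120);

			// Containment is half-open: the right and bottom edges are excluded.
			bool insideA = a.Contains(10, 10);      // true
			bool onRightEdge = a.Contains(110, 10); // false

			// Intersection and union follow the usual min/max edge rules.
			Rectangle overlap = Rectangle.Intersect(a, b); // X=60, Y=20, Width=50, Height=40
			Rectangle bounds = Rectangle.Union(a, b);      // X=10, Y=10, Width=190, Height=110

			// Inflate grows the rectangle symmetrically about its centre.
			Rectangle padded = Rectangle.Inflate(a, 5, 5); // X=5, Y=5, Width=110, Height=60

			// Offset moves the origin without changing the size.
			padded.Offset(new Point(1, 2));

			// Empty is the all-zero rectangle, and IsEmpty tests exactly that.
			bool empty = Rectangle.Empty.IsEmpty; // true
		}
	}
}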